gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "hash-set.h"
24 #include "vec.h"
25 #include "input.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "options.h"
29 #include "inchash.h"
30 #include "tree.h"
31 #include "fold-const.h"
32 #include "predict.h"
33 #include "tm.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "dominance.h"
37 #include "cfg.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimple-expr.h"
44 #include "is-a.h"
45 #include "gimple.h"
46 #include "hashtab.h"
47 #include "rtl.h"
48 #include "flags.h"
49 #include "statistics.h"
50 #include "insn-config.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "varasm.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "stor-layout.h"
60 #include "print-tree.h"
61 #include "gimplify.h"
62 #include "gimple-iterator.h"
63 #include "gimplify-me.h"
64 #include "gimple-walk.h"
65 #include "langhooks.h"
66 #include "target.h"
67 #include "hash-map.h"
68 #include "plugin-api.h"
69 #include "ipa-ref.h"
70 #include "cgraph.h"
71 #include "alloc-pool.h"
72 #include "symbol-summary.h"
73 #include "ipa-prop.h"
74 #include "bitmap.h"
75 #include "gimple-ssa.h"
76 #include "tree-cfg.h"
77 #include "tree-phinodes.h"
78 #include "ssa-iterators.h"
79 #include "tree-into-ssa.h"
80 #include "tree-dfa.h"
81 #include "tree-pass.h"
82 #include "tree-inline.h"
83 #include "ipa-inline.h"
84 #include "diagnostic.h"
85 #include "gimple-pretty-print.h"
86 #include "lto-streamer.h"
87 #include "data-streamer.h"
88 #include "tree-streamer.h"
89 #include "params.h"
90 #include "ipa-utils.h"
91 #include "stringpool.h"
92 #include "tree-ssanames.h"
93 #include "dbgcnt.h"
94 #include "domwalk.h"
95 #include "builtins.h"
96
97 /* Intermediate information that we get from alias analysis about a particular
98 parameter in a particular basic_block. When a parameter or the memory it
99 references is marked modified, we use that information in all dominated
100 blocks without consulting the alias analysis oracle. */
101
102 struct param_aa_status
103 {
104 /* Set when this structure contains meaningful information. If not, the
105 structure describing a dominating BB should be used instead. */
106 bool valid;
107
108 /* Whether we have seen something which might have modified the data in
109 question. PARM is for the parameter itself, REF is for data it points to
110 but using the alias type of individual accesses and PT is the same thing
111 but for computing aggregate pass-through functions using a very inclusive
112 ao_ref. */
113 bool parm_modified, ref_modified, pt_modified;
114 };
115
116 /* Information related to a given BB that is used only when looking at the
117 function body. */
118
119 struct ipa_bb_info
120 {
121 /* Call graph edges going out of this BB. */
122 vec<cgraph_edge *> cg_edges;
123 /* Alias analysis statuses of each formal parameter at this bb. */
124 vec<param_aa_status> param_aa_statuses;
125 };
126
127 /* Structure with global information that is only used when looking at function
128 body. */
129
130 struct func_body_info
131 {
132 /* The node that is being analyzed. */
133 cgraph_node *node;
134
135 /* Its info. */
136 struct ipa_node_params *info;
137
138 /* Information about individual BBs. */
139 vec<ipa_bb_info> bb_infos;
140
141 /* Number of parameters. */
142 int param_count;
143
144 /* Number of statements already walked when analyzing this function. */
145 unsigned int aa_walked;
146 };
147
148 /* Function summary where the parameter infos are actually stored. */
149 ipa_node_params_t *ipa_node_params_sum = NULL;
150 /* Vector of IPA-CP transformation data for each clone. */
151 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
152 /* Vector where the argument infos of call graph edges are actually stored. */
153 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
154
155 /* Holders of ipa cgraph hooks: */
156 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
157 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
158 static struct cgraph_node_hook_list *function_insertion_hook_holder;
159
160 /* Description of a reference to an IPA constant. */
161 struct ipa_cst_ref_desc
162 {
163 /* Edge that corresponds to the statement which took the reference. */
164 struct cgraph_edge *cs;
165 /* Linked list of duplicates created when call graph edges are cloned. */
166 struct ipa_cst_ref_desc *next_duplicate;
167 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
168 is out of control. */
169 int refcount;
170 };
171
172 /* Allocation pool for reference descriptions. */
173
174 static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
175 ("IPA-PROP ref descriptions", 32);
176
177 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
178 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
179
180 static bool
181 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
182 {
183 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
184
185 if (!fs_opts)
186 return false;
187 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
188 }
189
190 /* Return the index of the formal parameter whose tree is PTREE, looking it
191 up in the parameter DESCRIPTORS. Return -1 if it is not found. */
192
193 static int
194 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
195 {
196 int i, count;
197
198 count = descriptors.length ();
199 for (i = 0; i < count; i++)
200 if (descriptors[i].decl == ptree)
201 return i;
202
203 return -1;
204 }
205
206 /* Return the index of the formal parameter whose tree is PTREE in the
207 function which corresponds to INFO. Return -1 if it is not found. */
208
209 int
210 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
211 {
212 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
213 }
214
215 /* Populate the param_decl fields in the parameter DESCRIPTORS that
216 correspond to NODE. */
217
218 static void
219 ipa_populate_param_decls (struct cgraph_node *node,
220 vec<ipa_param_descriptor> &descriptors)
221 {
222 tree fndecl;
223 tree fnargs;
224 tree parm;
225 int param_num;
226
227 fndecl = node->decl;
228 gcc_assert (gimple_has_body_p (fndecl));
229 fnargs = DECL_ARGUMENTS (fndecl);
230 param_num = 0;
231 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
232 {
233 descriptors[param_num].decl = parm;
234 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
235 true);
236 param_num++;
237 }
238 }
239
240 /* Return how many formal parameters FNDECL has. */
241
242 int
243 count_formal_params (tree fndecl)
244 {
245 tree parm;
246 int count = 0;
247 gcc_assert (gimple_has_body_p (fndecl));
248
249 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
250 count++;
251
252 return count;
253 }
254
255 /* Print information about the Ith formal parameter of the function
256 corresponding to INFO to FILE. Note that the underlying descriptor array
257 is built just once using ipa_initialize_node_params. */
258
259 void
260 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
261 {
262 fprintf (file, "param #%i", i);
263 if (info->descriptors[i].decl)
264 {
265 fprintf (file, " ");
266 print_generic_expr (file, info->descriptors[i].decl, 0);
267 }
268 }
269
270 /* Initialize the ipa_node_params structure associated with NODE
271 to hold PARAM_COUNT parameters. */
272
273 void
274 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
275 {
276 struct ipa_node_params *info = IPA_NODE_REF (node);
277
278 if (!info->descriptors.exists () && param_count)
279 info->descriptors.safe_grow_cleared (param_count);
280 }
281
282 /* Initialize the ipa_node_params structure associated with NODE by counting
283 the function parameters, creating the descriptors and populating their
284 param_decls. */
285
286 void
287 ipa_initialize_node_params (struct cgraph_node *node)
288 {
289 struct ipa_node_params *info = IPA_NODE_REF (node);
290
291 if (!info->descriptors.exists ())
292 {
293 ipa_alloc_node_params (node, count_formal_params (node->decl));
294 ipa_populate_param_decls (node, info->descriptors);
295 }
296 }
297
298 /* Print the jump functions associated with call graph edge CS to file F. */
299
300 static void
301 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
302 {
303 int i, count;
304
305 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
306 for (i = 0; i < count; i++)
307 {
308 struct ipa_jump_func *jump_func;
309 enum jump_func_type type;
310
311 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
312 type = jump_func->type;
313
314 fprintf (f, " param %d: ", i);
315 if (type == IPA_JF_UNKNOWN)
316 fprintf (f, "UNKNOWN\n");
317 else if (type == IPA_JF_CONST)
318 {
319 tree val = jump_func->value.constant.value;
320 fprintf (f, "CONST: ");
321 print_generic_expr (f, val, 0);
322 if (TREE_CODE (val) == ADDR_EXPR
323 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
324 {
325 fprintf (f, " -> ");
326 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
327 0);
328 }
329 fprintf (f, "\n");
330 }
331 else if (type == IPA_JF_PASS_THROUGH)
332 {
333 fprintf (f, "PASS THROUGH: ");
334 fprintf (f, "%d, op %s",
335 jump_func->value.pass_through.formal_id,
336 get_tree_code_name (jump_func->value.pass_through.operation));
337 if (jump_func->value.pass_through.operation != NOP_EXPR)
338 {
339 fprintf (f, " ");
340 print_generic_expr (f,
341 jump_func->value.pass_through.operand, 0);
342 }
343 if (jump_func->value.pass_through.agg_preserved)
344 fprintf (f, ", agg_preserved");
345 fprintf (f, "\n");
346 }
347 else if (type == IPA_JF_ANCESTOR)
348 {
349 fprintf (f, "ANCESTOR: ");
350 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
351 jump_func->value.ancestor.formal_id,
352 jump_func->value.ancestor.offset);
353 if (jump_func->value.ancestor.agg_preserved)
354 fprintf (f, ", agg_preserved");
355 fprintf (f, "\n");
356 }
357
358 if (jump_func->agg.items)
359 {
360 struct ipa_agg_jf_item *item;
361 int j;
362
363 fprintf (f, " Aggregate passed by %s:\n",
364 jump_func->agg.by_ref ? "reference" : "value");
365 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
366 {
367 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
368 item->offset);
369 if (TYPE_P (item->value))
370 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
371 tree_to_uhwi (TYPE_SIZE (item->value)));
372 else
373 {
374 fprintf (f, "cst: ");
375 print_generic_expr (f, item->value, 0);
376 }
377 fprintf (f, "\n");
378 }
379 }
380
381 struct ipa_polymorphic_call_context *ctx
382 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
383 if (ctx && !ctx->useless_p ())
384 {
385 fprintf (f, " Context: ");
386 ctx->dump (f);
387 }
388
389 if (jump_func->alignment.known)
390 {
391 fprintf (f, " Alignment: %u, misalignment: %u\n",
392 jump_func->alignment.align,
393 jump_func->alignment.misalign);
394 }
395 else
396 fprintf (f, " Unknown alignment\n");
397 }
398 }
399
400
401 /* Print the jump functions of all arguments on all call graph edges going from
402 NODE to file F. */
403
404 void
405 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
406 {
407 struct cgraph_edge *cs;
408
409 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
410 node->order);
411 for (cs = node->callees; cs; cs = cs->next_callee)
412 {
413 if (!ipa_edge_args_info_available_for_edge_p (cs))
414 continue;
415
416 fprintf (f, " callsite %s/%i -> %s/%i : \n",
417 xstrdup_for_dump (node->name ()), node->order,
418 xstrdup_for_dump (cs->callee->name ()),
419 cs->callee->order);
420 ipa_print_node_jump_functions_for_edge (f, cs);
421 }
422
423 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
424 {
425 struct cgraph_indirect_call_info *ii;
426 if (!ipa_edge_args_info_available_for_edge_p (cs))
427 continue;
428
429 ii = cs->indirect_info;
430 if (ii->agg_contents)
431 fprintf (f, " indirect %s callsite, calling param %i, "
432 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
433 ii->member_ptr ? "member ptr" : "aggregate",
434 ii->param_index, ii->offset,
435 ii->by_ref ? "by reference" : "by value");
436 else
437 fprintf (f, " indirect %s callsite, calling param %i, "
438 "offset " HOST_WIDE_INT_PRINT_DEC,
439 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
440 ii->offset);
441
442 if (cs->call_stmt)
443 {
444 fprintf (f, ", for stmt ");
445 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
446 }
447 else
448 fprintf (f, "\n");
449 if (ii->polymorphic)
450 ii->context.dump (f);
451 ipa_print_node_jump_functions_for_edge (f, cs);
452 }
453 }
454
455 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
456
457 void
458 ipa_print_all_jump_functions (FILE *f)
459 {
460 struct cgraph_node *node;
461
462 fprintf (f, "\nJump functions:\n");
463 FOR_EACH_FUNCTION (node)
464 {
465 ipa_print_node_jump_functions (f, node);
466 }
467 }
468
469 /* Set JFUNC to be an unknown jump function, conveying no information. */
470
471 static void
472 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
473 {
474 jfunc->type = IPA_JF_UNKNOWN;
475 jfunc->alignment.known = false;
476 }
477
478 /* Set DST to be a copy of the constant jump function SRC (to be used by the
479 jump function combination code). The two functions will share their rdesc. */
480
481 static void
482 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
483 struct ipa_jump_func *src)
484
485 {
486 gcc_checking_assert (src->type == IPA_JF_CONST);
487 dst->type = IPA_JF_CONST;
488 dst->value.constant = src->value.constant;
489 }
490
491 /* Set JFUNC to be a constant jump function. */
492
493 static void
494 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
495 struct cgraph_edge *cs)
496 {
500 jfunc->type = IPA_JF_CONST;
501 jfunc->value.constant.value = unshare_expr_without_location (constant);
502
503 if (TREE_CODE (constant) == ADDR_EXPR
504 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
505 {
506 struct ipa_cst_ref_desc *rdesc;
507
508 rdesc = ipa_refdesc_pool.allocate ();
509 rdesc->cs = cs;
510 rdesc->next_duplicate = NULL;
511 rdesc->refcount = 1;
512 jfunc->value.constant.rdesc = rdesc;
513 }
514 else
515 jfunc->value.constant.rdesc = NULL;
516 }
517
518 /* Set JFUNC to be a simple pass-through jump function. */
519 static void
520 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
521 bool agg_preserved)
522 {
523 jfunc->type = IPA_JF_PASS_THROUGH;
524 jfunc->value.pass_through.operand = NULL_TREE;
525 jfunc->value.pass_through.formal_id = formal_id;
526 jfunc->value.pass_through.operation = NOP_EXPR;
527 jfunc->value.pass_through.agg_preserved = agg_preserved;
528 }
529
530 /* Set JFUNC to be an arithmetic pass through jump function. */
531
532 static void
533 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
534 tree operand, enum tree_code operation)
535 {
536 jfunc->type = IPA_JF_PASS_THROUGH;
537 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
538 jfunc->value.pass_through.formal_id = formal_id;
539 jfunc->value.pass_through.operation = operation;
540 jfunc->value.pass_through.agg_preserved = false;
541 }
542
543 /* Set JFUNC to be an ancestor jump function. */
544
545 static void
546 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
547 int formal_id, bool agg_preserved)
548 {
549 jfunc->type = IPA_JF_ANCESTOR;
550 jfunc->value.ancestor.formal_id = formal_id;
551 jfunc->value.ancestor.offset = offset;
552 jfunc->value.ancestor.agg_preserved = agg_preserved;
553 }
554
555 /* Get IPA BB information about the given BB. FBI is the context of the
556 analysis of this function body. */
557
558 static struct ipa_bb_info *
559 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
560 {
561 gcc_checking_assert (fbi);
562 return &fbi->bb_infos[bb->index];
563 }
564
565 /* Structure to be passed in between detect_type_change and
566 check_stmt_for_type_change. */
567
568 struct prop_type_change_info
569 {
570 /* Offset into the object where there is the virtual method pointer we are
571 looking for. */
572 HOST_WIDE_INT offset;
573 /* The declaration or SSA_NAME pointer of the base that we are checking for
574 type change. */
575 tree object;
576 /* Set to true if dynamic type change has been detected. */
577 bool type_maybe_changed;
578 };
579
580 /* Return true if STMT can modify a virtual method table pointer.
581
582 This function makes special assumptions about both constructors and
583 destructors which are all the functions that are allowed to alter the VMT
584 pointers. It assumes that destructors begin with assignment into all VMT
585 pointers and that constructors essentially look in the following way:
586
587 1) The very first thing they do is that they call constructors of ancestor
588 sub-objects that have them.
589
590 2) Then VMT pointers of this and all its ancestors are set to new values
591 corresponding to the type of the constructor.
592
593 3) Only afterwards, other stuff such as constructors of member sub-objects
594 and the code written by the user is run. Only this may include calling
595 virtual functions, directly or indirectly.
596
597 There is no way to call a constructor of an ancestor sub-object in any
598 other way.
599
600 This means that we do not have to care whether constructors get the correct
601 type information because they will always change it (in fact, if we define
602 the type to be given by the VMT pointer, it is undefined).
603
604 The most important fact to derive from the above is that if, for some
605 statement in section 3, we try to detect whether the dynamic type has
606 changed, we can safely ignore all calls as we examine the function body
607 backwards until we reach statements in section 2 because these calls cannot
608 be ancestor constructors or destructors (if the input is not bogus) and so
609 do not change the dynamic type (this holds true only for automatically
610 allocated objects but at the moment we devirtualize only these). We then
611 must detect that statements in section 2 change the dynamic type and can try
612 to derive the new type. That is enough and we can stop; we will never see
613 the calls into constructors of sub-objects in this code. Therefore we can
614 safely ignore all call statements that we traverse.
615 */
616
617 static bool
618 stmt_may_be_vtbl_ptr_store (gimple stmt)
619 {
620 if (is_gimple_call (stmt))
621 return false;
622 if (gimple_clobber_p (stmt))
623 return false;
624 else if (is_gimple_assign (stmt))
625 {
626 tree lhs = gimple_assign_lhs (stmt);
627
628 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
629 {
630 if (flag_strict_aliasing
631 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
632 return false;
633
634 if (TREE_CODE (lhs) == COMPONENT_REF
635 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
636 return false;
637 /* In the future we might want to use get_base_ref_and_offset to find
638 if there is a field corresponding to the offset and if so, proceed
639 almost like if it was a component ref. */
640 }
641 }
642 return true;
643 }
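
/* For illustration, the constructor layout described above corresponds
   roughly to the following pseudo-C++ (the types and names are of course
   only illustrative):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()
     {
       A::A ();                // 1) construct the ancestor sub-object
       this->_vptr = &_ZTV1B;  // 2) store the VMT pointer corresponding to B
       user_code ();           // 3) only now run code that may call virtual
                               //    functions
     }  */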
644
645 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
646 to check whether a particular statement may modify the virtual table
647 pointer. It stores its result into DATA, which points to a
648 prop_type_change_info structure. */
649
650 static bool
651 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
652 {
653 gimple stmt = SSA_NAME_DEF_STMT (vdef);
654 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
655
656 if (stmt_may_be_vtbl_ptr_store (stmt))
657 {
658 tci->type_maybe_changed = true;
659 return true;
660 }
661 else
662 return false;
663 }
664
665 /* See if ARG is a PARM_DECL describing an instance passed by pointer
666 or reference in FUNCTION. Return true if the dynamic type may change
667 between the beginning of the function and the point where CALL is invoked.
668
669 Generally functions are not allowed to change the type of such instances,
670 but they may call destructors. We assume that methods cannot destroy the
671 THIS pointer. Also, as a special case, constructors and destructors may
672 change the type of the THIS pointer. */
673
674 static bool
675 param_type_may_change_p (tree function, tree arg, gimple call)
676 {
677 /* Pure functions cannot change the dynamic type;
678 that requires writing to memory. */
679 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
680 return false;
681 /* We need to check if we are within an inlined constructor
682 or destructor (ideally we would have a way to check that the
683 inlined cdtor is actually working on ARG, but we don't have
684 an easy way to tie the two together, so punt on all non-pure cdtors).
685 We could also record the types of cdtors and, once we know the type
686 of the instance, match them.
687
688 Also, code unification optimizations may merge calls from
689 different blocks, making return values unreliable. So
690 do nothing during late optimization. */
691 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
692 return true;
693 if (TREE_CODE (arg) == SSA_NAME
694 && SSA_NAME_IS_DEFAULT_DEF (arg)
695 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
696 {
697 /* Normal (non-THIS) argument. */
698 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
699 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
700 /* THIS pointer of a method - here we want to watch constructors
701 and destructors as those definitely may change the dynamic
702 type. */
703 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
704 && !DECL_CXX_CONSTRUCTOR_P (function)
705 && !DECL_CXX_DESTRUCTOR_P (function)
706 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
707 {
708 /* Walk the inline stack and watch out for ctors/dtors. */
709 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
710 block = BLOCK_SUPERCONTEXT (block))
711 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
712 return true;
713 return false;
714 }
715 }
716 return true;
717 }
718
719 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
720 callsite CALL) by looking for assignments to its virtual table pointer. If
721 it is, return true and fill in the jump function JFUNC with relevant type
722 information or set it to unknown. ARG is the object itself (not a pointer
723 to it, unless dereferenced). BASE is the base of the memory access as
724 returned by get_ref_base_and_extent, as is the offset.
725
726 This is a helper function for detect_type_change and detect_type_change_ssa
727 that does the heavy work, which is usually unnecessary. */
728
729 static bool
730 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
731 gcall *call, struct ipa_jump_func *jfunc,
732 HOST_WIDE_INT offset)
733 {
734 struct prop_type_change_info tci;
735 ao_ref ao;
736 bool entry_reached = false;
737
738 gcc_checking_assert (DECL_P (arg)
739 || TREE_CODE (arg) == MEM_REF
740 || handled_component_p (arg));
741
742 comp_type = TYPE_MAIN_VARIANT (comp_type);
743
744 /* Const calls cannot call virtual methods through VMT and so type changes do
745 not matter. */
746 if (!flag_devirtualize || !gimple_vuse (call)
747 /* Be sure expected_type is polymorphic. */
748 || !comp_type
749 || TREE_CODE (comp_type) != RECORD_TYPE
750 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
751 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
752 return true;
753
754 ao_ref_init (&ao, arg);
755 ao.base = base;
756 ao.offset = offset;
757 ao.size = POINTER_SIZE;
758 ao.max_size = ao.size;
759
760 tci.offset = offset;
761 tci.object = get_base_address (arg);
762 tci.type_maybe_changed = false;
763
764 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
765 &tci, NULL, &entry_reached);
766 if (!tci.type_maybe_changed)
767 return false;
768
769 ipa_set_jf_unknown (jfunc);
770 return true;
771 }
772
773 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
774 If it is, return true and fill in the jump function JFUNC with relevant type
775 information or set it to unknown. ARG is the object itself (not a pointer
776 to it, unless dereferenced). BASE is the base of the memory access as
777 returned by get_ref_base_and_extent, as is the offset. */
778
779 static bool
780 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
781 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
782 {
783 if (!flag_devirtualize)
784 return false;
785
786 if (TREE_CODE (base) == MEM_REF
787 && !param_type_may_change_p (current_function_decl,
788 TREE_OPERAND (base, 0),
789 call))
790 return false;
791 return detect_type_change_from_memory_writes (arg, base, comp_type,
792 call, jfunc, offset);
793 }
794
795 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
796 SSA name (its dereference will become the base and the offset is assumed to
797 be zero). */
798
799 static bool
800 detect_type_change_ssa (tree arg, tree comp_type,
801 gcall *call, struct ipa_jump_func *jfunc)
802 {
803 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
804 if (!flag_devirtualize
805 || !POINTER_TYPE_P (TREE_TYPE (arg)))
806 return false;
807
808 if (!param_type_may_change_p (current_function_decl, arg, call))
809 return false;
810
811 arg = build2 (MEM_REF, ptr_type_node, arg,
812 build_int_cst (ptr_type_node, 0));
813
814 return detect_type_change_from_memory_writes (arg, arg, comp_type,
815 call, jfunc, 0);
816 }
817
818 /* Callback of walk_aliased_vdefs. Records the fact that it has been invoked
819 by setting the boolean variable pointed to by DATA. */
820
821 static bool
822 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
823 void *data)
824 {
825 bool *b = (bool *) data;
826 *b = true;
827 return true;
828 }
829
830 /* Return true if we have already walked so many statements in AA that we
831 should really just start giving up. */
832
833 static bool
834 aa_overwalked (struct func_body_info *fbi)
835 {
836 gcc_checking_assert (fbi);
837 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
838 }
839
840 /* Find the nearest valid aa status for parameter specified by INDEX that
841 dominates BB. */
842
843 static struct param_aa_status *
844 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
845 int index)
846 {
847 while (true)
848 {
849 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
850 if (!bb)
851 return NULL;
852 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
853 if (!bi->param_aa_statuses.is_empty ()
854 && bi->param_aa_statuses[index].valid)
855 return &bi->param_aa_statuses[index];
856 }
857 }
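
/* For instance (the BB numbers are purely illustrative): if BB6 is
   immediately dominated by BB4 and BB4 by BB2, and only BB2 holds a valid
   status for the parameter, a query for BB6 walks BB6 -> BB4 -> BB2 and
   returns the status cached in BB2.  */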
858
859 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
860 structures and/or initialize the result with a dominating description as
861 necessary. */
862
863 static struct param_aa_status *
864 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
865 int index)
866 {
867 gcc_checking_assert (fbi);
868 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
869 if (bi->param_aa_statuses.is_empty ())
870 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
871 struct param_aa_status *paa = &bi->param_aa_statuses[index];
872 if (!paa->valid)
873 {
874 gcc_checking_assert (!paa->parm_modified
875 && !paa->ref_modified
876 && !paa->pt_modified);
877 struct param_aa_status *dom_paa;
878 dom_paa = find_dominating_aa_status (fbi, bb, index);
879 if (dom_paa)
880 *paa = *dom_paa;
881 else
882 paa->valid = true;
883 }
884
885 return paa;
886 }
887
888 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
889 a value known not to be modified in this function before reaching the
890 statement STMT. FBI holds information about the function that we have
891 gathered so far but that does not survive the summary building stage. */
892
893 static bool
894 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
895 gimple stmt, tree parm_load)
896 {
897 struct param_aa_status *paa;
898 bool modified = false;
899 ao_ref refd;
900
901 /* FIXME: FBI can be NULL if we are being called from outside
902 ipa_node_analysis or ipcp_transform_function, which currently happens
903 during inlining analysis. It would be great to extend fbi's lifetime and
904 always have it. Currently, we are just not afraid of too much walking in
905 that case. */
906 if (fbi)
907 {
908 if (aa_overwalked (fbi))
909 return false;
910 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
911 if (paa->parm_modified)
912 return false;
913 }
914 else
915 paa = NULL;
916
917 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
918 ao_ref_init (&refd, parm_load);
919 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
920 &modified, NULL);
921 if (fbi)
922 fbi->aa_walked += walked;
923 if (paa && modified)
924 paa->parm_modified = true;
925 return !modified;
926 }
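
/* As an example (illustrative only), in

     void foo (int a)
     {
       int *p = &a;
       use (p);
       bar (a);
     }

   A is not a gimple register and the argument of bar is loaded from it; the
   load is preserved only if the walk over virtual definitions shows that the
   call to use could not have written to A.  */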
927
928 /* If STMT is an assignment that loads a value from a parameter declaration
929 that has not been modified, return the index of the parameter in
930 ipa_node_params. Otherwise return -1. */
931
932 static int
933 load_from_unmodified_param (struct func_body_info *fbi,
934 vec<ipa_param_descriptor> descriptors,
935 gimple stmt)
936 {
937 int index;
938 tree op1;
939
940 if (!gimple_assign_single_p (stmt))
941 return -1;
942
943 op1 = gimple_assign_rhs1 (stmt);
944 if (TREE_CODE (op1) != PARM_DECL)
945 return -1;
946
947 index = ipa_get_param_decl_index_1 (descriptors, op1);
948 if (index < 0
949 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
950 return -1;
951
952 return index;
953 }
954
955 /* Return true if memory reference REF (which must be a load through parameter
956 with INDEX) loads data that are known to be unmodified in this function
957 before reaching statement STMT. */
958
959 static bool
960 parm_ref_data_preserved_p (struct func_body_info *fbi,
961 int index, gimple stmt, tree ref)
962 {
963 struct param_aa_status *paa;
964 bool modified = false;
965 ao_ref refd;
966
967 /* FIXME: FBI can be NULL if we are being called from outside
968 ipa_node_analysis or ipcp_transform_function, which currently happens
969 during inlining analysis. It would be great to extend fbi's lifetime and
970 always have it. Currently, we are just not afraid of too much walking in
971 that case. */
972 if (fbi)
973 {
974 if (aa_overwalked (fbi))
975 return false;
976 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
977 if (paa->ref_modified)
978 return false;
979 }
980 else
981 paa = NULL;
982
983 gcc_checking_assert (gimple_vuse (stmt));
984 ao_ref_init (&refd, ref);
985 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
986 &modified, NULL);
987 if (fbi)
988 fbi->aa_walked += walked;
989 if (paa && modified)
990 paa->ref_modified = true;
991 return !modified;
992 }
993
994 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
995 is known to be unmodified in this function before reaching call statement
996 CALL into which it is passed. FBI describes the function body. */
997
998 static bool
999 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
1000 gimple call, tree parm)
1001 {
1002 bool modified = false;
1003 ao_ref refd;
1004
1005 /* It's unnecessary to calculate anything about memory contents for a const
1006 function because it is not going to use it. But do not cache the result
1007 either. Also, no such calculations for non-pointers. */
1008 if (!gimple_vuse (call)
1009 || !POINTER_TYPE_P (TREE_TYPE (parm))
1010 || aa_overwalked (fbi))
1011 return false;
1012
1013 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1014 index);
1015 if (paa->pt_modified)
1016 return false;
1017
1018 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1019 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1020 &modified, NULL);
1021 fbi->aa_walked += walked;
1022 if (modified)
1023 paa->pt_modified = true;
1024 return !modified;
1025 }
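
/* For example, in

     void foo (struct S *p)
     {
       bar (p);
     }

   nothing writes to *P before the call, so the data pointed to by P is known
   to be preserved and the pass-through jump function for the argument of bar
   may have agg_preserved set.  */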
1026
1027 /* Return true if we can prove that OP is a memory reference loading unmodified
1028 data from an aggregate passed as a parameter and if the aggregate is passed
1029 by reference, that the alias type of the load corresponds to the type of the
1030 formal parameter (so that we can rely on this type for TBAA in callers).
1031 FBI and DESCRIPTORS describe parameters of the current function (but FBI
1032 can be NULL), STMT is the load statement. If the function returns true,
1033 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1034 within the aggregate and whether it is a load from a value passed by
1035 reference respectively. */
1036
1037 static bool
1038 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1039 vec<ipa_param_descriptor> descriptors,
1040 gimple stmt, tree op, int *index_p,
1041 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1042 bool *by_ref_p)
1043 {
1044 int index;
1045 HOST_WIDE_INT size, max_size;
1046 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1047
1048 if (max_size == -1 || max_size != size || *offset_p < 0)
1049 return false;
1050
1051 if (DECL_P (base))
1052 {
1053 int index = ipa_get_param_decl_index_1 (descriptors, base);
1054 if (index >= 0
1055 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1056 {
1057 *index_p = index;
1058 *by_ref_p = false;
1059 if (size_p)
1060 *size_p = size;
1061 return true;
1062 }
1063 return false;
1064 }
1065
1066 if (TREE_CODE (base) != MEM_REF
1067 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1068 || !integer_zerop (TREE_OPERAND (base, 1)))
1069 return false;
1070
1071 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1072 {
1073 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1074 index = ipa_get_param_decl_index_1 (descriptors, parm);
1075 }
1076 else
1077 {
1078 /* This branch catches situations where a pointer parameter is not a
1079 gimple register, for example:
1080
1081 void hip7(S*) (struct S * p)
1082 {
1083 void (*<T2e4>) (struct S *) D.1867;
1084 struct S * p.1;
1085
1086 <bb 2>:
1087 p.1_1 = p;
1088 D.1867_2 = p.1_1->f;
1089 D.1867_2 ();
1090 gdp = &p;
1091 */
1092
1093 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1094 index = load_from_unmodified_param (fbi, descriptors, def);
1095 }
1096
1097 if (index >= 0
1098 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1099 {
1100 *index_p = index;
1101 *by_ref_p = true;
1102 if (size_p)
1103 *size_p = size;
1104 return true;
1105 }
1106 return false;
1107 }
1108
1109 /* Just like the previous function, only without the FBI pointer describing
1110 the function body, for users outside of this file. */
1111
1112 bool
1113 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1114 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1115 bool *by_ref_p)
1116 {
1117 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1118 offset_p, NULL, by_ref_p);
1119 }
1120
1121 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1122 of an assignment statement STMT, try to determine whether we are actually
1123 handling any of the following cases and construct an appropriate jump
1124 function into JFUNC if so:
1125
1126 1) The passed value is loaded from a formal parameter which is not a gimple
1127 register (most probably because it is addressable; the value itself has to
1128 be scalar) and we can guarantee the value has not changed. This case can
1129 therefore be described by a simple pass-through jump function. For example:
1130
1131 foo (int a)
1132 {
1133 int a.0;
1134
1135 a.0_2 = a;
1136 bar (a.0_2);
1137
1138 2) The passed value can be described by a simple arithmetic pass-through
1139 jump function. E.g.
1140
1141 foo (int a)
1142 {
1143 int D.2064;
1144
1145 D.2064_4 = a.1(D) + 4;
1146 bar (D.2064_4);
1147
1148 This case can also occur in combination of the previous one, e.g.:
1149
1150 foo (int a, int z)
1151 {
1152 int a.0;
1153 int D.2064;
1154
1155 a.0_3 = a;
1156 D.2064_4 = a.0_3 + 4;
1157 foo (D.2064_4);
1158
1159 3) The passed value is an address of an object within another one (which
1160 is also passed by reference). Such situations are described by an ancestor
1161 jump function and describe situations such as:
1162
1163 B::foo() (struct B * const this)
1164 {
1165 struct A * D.1845;
1166
1167 D.1845_2 = &this_1(D)->D.1748;
1168 A::bar (D.1845_2);
1169
1170 INFO is the structure describing individual parameters across different
1171 stages of IPA optimizations. FBI contains the information that is only
1172 needed for intraprocedural analysis. */
1173
1174 static void
1175 compute_complex_assign_jump_func (struct func_body_info *fbi,
1176 struct ipa_node_params *info,
1177 struct ipa_jump_func *jfunc,
1178 gcall *call, gimple stmt, tree name,
1179 tree param_type)
1180 {
1181 HOST_WIDE_INT offset, size, max_size;
1182 tree op1, tc_ssa, base, ssa;
1183 int index;
1184
1185 op1 = gimple_assign_rhs1 (stmt);
1186
1187 if (TREE_CODE (op1) == SSA_NAME)
1188 {
1189 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1190 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1191 else
1192 index = load_from_unmodified_param (fbi, info->descriptors,
1193 SSA_NAME_DEF_STMT (op1));
1194 tc_ssa = op1;
1195 }
1196 else
1197 {
1198 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1199 tc_ssa = gimple_assign_lhs (stmt);
1200 }
1201
1202 if (index >= 0)
1203 {
1204 tree op2 = gimple_assign_rhs2 (stmt);
1205
1206 if (op2)
1207 {
1208 if (!is_gimple_ip_invariant (op2)
1209 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1210 && !useless_type_conversion_p (TREE_TYPE (name),
1211 TREE_TYPE (op1))))
1212 return;
1213
1214 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1215 gimple_assign_rhs_code (stmt));
1216 }
1217 else if (gimple_assign_single_p (stmt))
1218 {
1219 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1220 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1221 }
1222 return;
1223 }
1224
1225 if (TREE_CODE (op1) != ADDR_EXPR)
1226 return;
1227 op1 = TREE_OPERAND (op1, 0);
1228 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1229 return;
1230 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1231 if (TREE_CODE (base) != MEM_REF
1232 /* If this is a varying address, punt. */
1233 || max_size == -1
1234 || max_size != size)
1235 return;
1236 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1237 ssa = TREE_OPERAND (base, 0);
1238 if (TREE_CODE (ssa) != SSA_NAME
1239 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1240 || offset < 0)
1241 return;
1242
1243 /* Dynamic types are changed in constructors and destructors. */
1244 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1245 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1246 ipa_set_ancestor_jf (jfunc, offset, index,
1247 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1248 }
1249
1250 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1251 it looks like:
1252
1253 iftmp.1_3 = &obj_2(D)->D.1762;
1254
1255 The base of the MEM_REF must be a default definition SSA NAME of a
1256 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1257 whole MEM_REF expression is returned and the offset calculated from any
1258 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1259 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1260
1261 static tree
1262 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1263 {
1264 HOST_WIDE_INT size, max_size;
1265 tree expr, parm, obj;
1266
1267 if (!gimple_assign_single_p (assign))
1268 return NULL_TREE;
1269 expr = gimple_assign_rhs1 (assign);
1270
1271 if (TREE_CODE (expr) != ADDR_EXPR)
1272 return NULL_TREE;
1273 expr = TREE_OPERAND (expr, 0);
1274 obj = expr;
1275 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1276
1277 if (TREE_CODE (expr) != MEM_REF
1278 /* If this is a varying address, punt. */
1279 || max_size == -1
1280 || max_size != size
1281 || *offset < 0)
1282 return NULL_TREE;
1283 parm = TREE_OPERAND (expr, 0);
1284 if (TREE_CODE (parm) != SSA_NAME
1285 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1286 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1287 return NULL_TREE;
1288
1289 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1290 *obj_p = obj;
1291 return expr;
1292 }
1293
1294
1295 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1296 statement PHI, try to find out whether it is in fact a
1297 multiple-inheritance typecast from a descendant into an ancestor of a formal
1298 parameter and thus can be described by an ancestor jump function and if so,
1299 write the appropriate function into JFUNC.
1300
1301 Essentially we want to match the following pattern:
1302
1303 if (obj_2(D) != 0B)
1304 goto <bb 3>;
1305 else
1306 goto <bb 4>;
1307
1308 <bb 3>:
1309 iftmp.1_3 = &obj_2(D)->D.1762;
1310
1311 <bb 4>:
1312 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1313 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1314 return D.1879_6; */
1315
1316 static void
1317 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1318 struct ipa_node_params *info,
1319 struct ipa_jump_func *jfunc,
1320 gcall *call, gphi *phi)
1321 {
1322 HOST_WIDE_INT offset;
1323 gimple assign, cond;
1324 basic_block phi_bb, assign_bb, cond_bb;
1325 tree tmp, parm, expr, obj;
1326 int index, i;
1327
1328 if (gimple_phi_num_args (phi) != 2)
1329 return;
1330
1331 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1332 tmp = PHI_ARG_DEF (phi, 0);
1333 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1334 tmp = PHI_ARG_DEF (phi, 1);
1335 else
1336 return;
1337 if (TREE_CODE (tmp) != SSA_NAME
1338 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1339 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1340 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1341 return;
1342
1343 assign = SSA_NAME_DEF_STMT (tmp);
1344 assign_bb = gimple_bb (assign);
1345 if (!single_pred_p (assign_bb))
1346 return;
1347 expr = get_ancestor_addr_info (assign, &obj, &offset);
1348 if (!expr)
1349 return;
1350 parm = TREE_OPERAND (expr, 0);
1351 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1352 if (index < 0)
1353 return;
1354
1355 cond_bb = single_pred (assign_bb);
1356 cond = last_stmt (cond_bb);
1357 if (!cond
1358 || gimple_code (cond) != GIMPLE_COND
1359 || gimple_cond_code (cond) != NE_EXPR
1360 || gimple_cond_lhs (cond) != parm
1361 || !integer_zerop (gimple_cond_rhs (cond)))
1362 return;
1363
1364 phi_bb = gimple_bb (phi);
1365 for (i = 0; i < 2; i++)
1366 {
1367 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1368 if (pred != assign_bb && pred != cond_bb)
1369 return;
1370 }
1371
1372 ipa_set_ancestor_jf (jfunc, offset, index,
1373 parm_ref_data_pass_through_p (fbi, index, call, parm));
1374 }
1375
1376 /* Inspect the given TYPE and return true iff it has the same structure (the
1377 same number of fields of the same types) as a C++ member pointer. If
1378 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1379 corresponding fields there. */
1380
1381 static bool
1382 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1383 {
1384 tree fld;
1385
1386 if (TREE_CODE (type) != RECORD_TYPE)
1387 return false;
1388
1389 fld = TYPE_FIELDS (type);
1390 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1391 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1392 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1393 return false;
1394
1395 if (method_ptr)
1396 *method_ptr = fld;
1397
1398 fld = DECL_CHAIN (fld);
1399 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1400 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1401 return false;
1402 if (delta)
1403 *delta = fld;
1404
1405 if (DECL_CHAIN (fld))
1406 return false;
1407
1408 return true;
1409 }
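
/* A matching TYPE looks roughly like the lowered form of a C++ pointer to
   member function (a sketch, the field names are illustrative):

     struct
     {
       void (*__pfn) (void *);  // pointer to the method
       long __delta;            // adjustment of the THIS pointer
     };  */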
1410
1411 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1412 return the rhs of its defining statement. Otherwise return RHS as it
1413 is. */
1414
1415 static inline tree
1416 get_ssa_def_if_simple_copy (tree rhs)
1417 {
1418 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1419 {
1420 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1421
1422 if (gimple_assign_single_p (def_stmt))
1423 rhs = gimple_assign_rhs1 (def_stmt);
1424 else
1425 break;
1426 }
1427 return rhs;
1428 }
1429
1430 /* Simple linked list, describing known contents of an aggregate before a
1431 call. */
1432
1433 struct ipa_known_agg_contents_list
1434 {
1435 /* Offset and size of the described part of the aggregate. */
1436 HOST_WIDE_INT offset, size;
1437 /* Known constant value or NULL if the contents are known to be unknown. */
1438 tree constant;
1439 /* Pointer to the next structure in the list. */
1440 struct ipa_known_agg_contents_list *next;
1441 };
1442
1443 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1444 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1445 unless there is a partial overlap, in which case return NULL, or unless such
1446 an element is already there, in which case set *ALREADY_THERE to true. */
1447
1448 static struct ipa_known_agg_contents_list **
1449 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1450 HOST_WIDE_INT lhs_offset,
1451 HOST_WIDE_INT lhs_size,
1452 bool *already_there)
1453 {
1454 struct ipa_known_agg_contents_list **p = list;
1455 while (*p && (*p)->offset < lhs_offset)
1456 {
1457 if ((*p)->offset + (*p)->size > lhs_offset)
1458 return NULL;
1459 p = &(*p)->next;
1460 }
1461
1462 if (*p && (*p)->offset < lhs_offset + lhs_size)
1463 {
1464 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1465 /* We already know this value is subsequently overwritten with
1466 something else. */
1467 *already_there = true;
1468 else
1469 /* Otherwise this is a partial overlap which we cannot
1470 represent. */
1471 return NULL;
1472 }
1473 return p;
1474 }
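
/* For example (offsets and sizes in bits, purely illustrative): with entries
   describing bits 0-31 and 64-95 already in the list, a new entry for bits
   32-63 is inserted between them, a second entry for bits 64-95 sets
   *ALREADY_THERE, and an entry for bits 48-79 makes the function return NULL
   because of the partial overlap.  */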
1475
1476 /* Build aggregate jump function from LIST, assuming there are exactly
1477 CONST_COUNT constant entries there and that the offset of the passed argument
1478 is ARG_OFFSET and store it into JFUNC. */
1479
1480 static void
1481 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1482 int const_count, HOST_WIDE_INT arg_offset,
1483 struct ipa_jump_func *jfunc)
1484 {
1485 vec_alloc (jfunc->agg.items, const_count);
1486 while (list)
1487 {
1488 if (list->constant)
1489 {
1490 struct ipa_agg_jf_item item;
1491 item.offset = list->offset - arg_offset;
1492 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1493 item.value = unshare_expr_without_location (list->constant);
1494 jfunc->agg.items->quick_push (item);
1495 }
1496 list = list->next;
1497 }
1498 }
1499
1500 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1501 in ARG is filled in with constant values. ARG can either be an aggregate
1502 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1503 aggregate. JFUNC is the jump function into which the constants are
1504 subsequently stored. */
1505
1506 static void
1507 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1508 tree arg_type,
1509 struct ipa_jump_func *jfunc)
1510 {
1511 struct ipa_known_agg_contents_list *list = NULL;
1512 int item_count = 0, const_count = 0;
1513 HOST_WIDE_INT arg_offset, arg_size;
1514 gimple_stmt_iterator gsi;
1515 tree arg_base;
1516 bool check_ref, by_ref;
1517 ao_ref r;
1518
1519 /* The function operates in three stages. First, we prepare check_ref, r,
1520 arg_base and arg_offset based on what is actually passed as an actual
1521 argument. */
1522
1523 if (POINTER_TYPE_P (arg_type))
1524 {
1525 by_ref = true;
1526 if (TREE_CODE (arg) == SSA_NAME)
1527 {
1528 tree type_size;
1529 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1530 return;
1531 check_ref = true;
1532 arg_base = arg;
1533 arg_offset = 0;
1534 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1535 arg_size = tree_to_uhwi (type_size);
1536 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1537 }
1538 else if (TREE_CODE (arg) == ADDR_EXPR)
1539 {
1540 HOST_WIDE_INT arg_max_size;
1541
1542 arg = TREE_OPERAND (arg, 0);
1543 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1544 &arg_max_size);
1545 if (arg_max_size == -1
1546 || arg_max_size != arg_size
1547 || arg_offset < 0)
1548 return;
1549 if (DECL_P (arg_base))
1550 {
1551 check_ref = false;
1552 ao_ref_init (&r, arg_base);
1553 }
1554 else
1555 return;
1556 }
1557 else
1558 return;
1559 }
1560 else
1561 {
1562 HOST_WIDE_INT arg_max_size;
1563
1564 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1565
1566 by_ref = false;
1567 check_ref = false;
1568 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1569 &arg_max_size);
1570 if (arg_max_size == -1
1571 || arg_max_size != arg_size
1572 || arg_offset < 0)
1573 return;
1574
1575 ao_ref_init (&r, arg);
1576 }
1577
1578 /* Second stage walks back the BB, looks at individual statements and as long
1579 as it is confident of how the statements affect contents of the
1580 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1581 describing it. */
1582 gsi = gsi_for_stmt (call);
1583 gsi_prev (&gsi);
1584 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1585 {
1586 struct ipa_known_agg_contents_list *n, **p;
1587 gimple stmt = gsi_stmt (gsi);
1588 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1589 tree lhs, rhs, lhs_base;
1590
1591 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1592 continue;
1593 if (!gimple_assign_single_p (stmt))
1594 break;
1595
1596 lhs = gimple_assign_lhs (stmt);
1597 rhs = gimple_assign_rhs1 (stmt);
1598 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1599 || TREE_CODE (lhs) == BIT_FIELD_REF
1600 || contains_bitfld_component_ref_p (lhs))
1601 break;
1602
1603 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1604 &lhs_max_size);
1605 if (lhs_max_size == -1
1606 || lhs_max_size != lhs_size)
1607 break;
1608
1609 if (check_ref)
1610 {
1611 if (TREE_CODE (lhs_base) != MEM_REF
1612 || TREE_OPERAND (lhs_base, 0) != arg_base
1613 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1614 break;
1615 }
1616 else if (lhs_base != arg_base)
1617 {
1618 if (DECL_P (lhs_base))
1619 continue;
1620 else
1621 break;
1622 }
1623
1624 bool already_there = false;
1625 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1626 &already_there);
1627 if (!p)
1628 break;
1629 if (already_there)
1630 continue;
1631
1632 rhs = get_ssa_def_if_simple_copy (rhs);
1633 n = XALLOCA (struct ipa_known_agg_contents_list);
1634 n->size = lhs_size;
1635 n->offset = lhs_offset;
1636 if (is_gimple_ip_invariant (rhs))
1637 {
1638 n->constant = rhs;
1639 const_count++;
1640 }
1641 else
1642 n->constant = NULL_TREE;
1643 n->next = *p;
1644 *p = n;
1645
1646 item_count++;
1647 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1648 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1649 break;
1650 }
1651
1652 /* Third stage just goes over the list and creates an appropriate vector of
1653 ipa_agg_jf_item structures out of it, of course only if there are
1654 any known constants to begin with. */
1655
1656 if (const_count)
1657 {
1658 jfunc->agg.by_ref = by_ref;
1659 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1660 }
1661 }
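
/* As an example, the walk above recognizes situations such as the following
   (a sketch; the names are of course only illustrative):

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   and records the constants 1 and 2, together with their offsets within S,
   in the aggregate jump function of the argument of foo.  */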
1662
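/* Return the type of the Ith formal parameter of the function called by edge
   E, or NULL if it cannot be determined from either the callee declaration
   or the type of the call statement. */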
1663 static tree
1664 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1665 {
1666 int n;
1667 tree type = (e->callee
1668 ? TREE_TYPE (e->callee->decl)
1669 : gimple_call_fntype (e->call_stmt));
1670 tree t = TYPE_ARG_TYPES (type);
1671
1672 for (n = 0; n < i; n++)
1673 {
1674 if (!t)
1675 break;
1676 t = TREE_CHAIN (t);
1677 }
1678 if (t)
1679 return TREE_VALUE (t);
1680 if (!e->callee)
1681 return NULL;
1682 t = DECL_ARGUMENTS (e->callee->decl);
1683 for (n = 0; n < i; n++)
1684 {
1685 if (!t)
1686 return NULL;
1687 t = TREE_CHAIN (t);
1688 }
1689 if (t)
1690 return TREE_TYPE (t);
1691 return NULL;
1692 }
1693
1694 /* Compute jump function for all arguments of callsite CS and insert the
1695 information in the jump_functions array in the ipa_edge_args corresponding
1696 to this callsite. */
1697
1698 static void
1699 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1700 struct cgraph_edge *cs)
1701 {
1702 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1703 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1704 gcall *call = cs->call_stmt;
1705 int n, arg_num = gimple_call_num_args (call);
1706 bool useful_context = false;
1707
1708 if (arg_num == 0 || args->jump_functions)
1709 return;
1710 vec_safe_grow_cleared (args->jump_functions, arg_num);
1711 if (flag_devirtualize)
1712 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1713
1714 if (gimple_call_internal_p (call))
1715 return;
1716 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1717 return;
1718
1719 for (n = 0; n < arg_num; n++)
1720 {
1721 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1722 tree arg = gimple_call_arg (call, n);
1723 tree param_type = ipa_get_callee_param_type (cs, n);
1724 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1725 {
1726 tree instance;
1727 struct ipa_polymorphic_call_context context (cs->caller->decl,
1728 arg, cs->call_stmt,
1729 &instance);
1730 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1731 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1732 if (!context.useless_p ())
1733 useful_context = true;
1734 }
1735
1736 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1737 {
1738 unsigned HOST_WIDE_INT hwi_bitpos;
1739 unsigned align;
1740
1741 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1742 && align % BITS_PER_UNIT == 0
1743 && hwi_bitpos % BITS_PER_UNIT == 0)
1744 {
1745 jfunc->alignment.known = true;
1746 jfunc->alignment.align = align / BITS_PER_UNIT;
1747 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1748 }
1749 else
1750 gcc_assert (!jfunc->alignment.known);
1751 }
1752 else
1753 gcc_assert (!jfunc->alignment.known);
1754
1755 if (is_gimple_ip_invariant (arg))
1756 ipa_set_jf_constant (jfunc, arg, cs);
1757 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1758 && TREE_CODE (arg) == PARM_DECL)
1759 {
1760 int index = ipa_get_param_decl_index (info, arg);
1761
1762 gcc_assert (index >= 0);
1763 /* Aggregate passed by value, check for pass-through, otherwise we
1764 will attempt to fill in aggregate contents later in this
1765 for cycle. */
1766 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1767 {
1768 ipa_set_jf_simple_pass_through (jfunc, index, false);
1769 continue;
1770 }
1771 }
1772 else if (TREE_CODE (arg) == SSA_NAME)
1773 {
1774 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1775 {
1776 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1777 if (index >= 0)
1778 {
1779 bool agg_p;
1780 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1781 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1782 }
1783 }
1784 else
1785 {
1786 gimple stmt = SSA_NAME_DEF_STMT (arg);
1787 if (is_gimple_assign (stmt))
1788 compute_complex_assign_jump_func (fbi, info, jfunc,
1789 call, stmt, arg, param_type);
1790 else if (gimple_code (stmt) == GIMPLE_PHI)
1791 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1792 call,
1793 as_a <gphi *> (stmt));
1794 }
1795 }
1796
1797 /* If ARG is a pointer, we cannot use its type to determine the type of the
1798 aggregate passed (because type conversions are ignored in gimple). Usually we
1799 can safely get the type from the function declaration, but in case of K&R
1800 prototypes or variadic functions we can try our luck with the type of the
1801 pointer passed. TODO: Since we look for actual initialization of the memory
1802 object, we may be better off working out the type based on the stores we find. */
1803 if (!param_type)
1804 param_type = TREE_TYPE (arg);
1805
1806 if ((jfunc->type != IPA_JF_PASS_THROUGH
1807 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1808 && (jfunc->type != IPA_JF_ANCESTOR
1809 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1810 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1811 || POINTER_TYPE_P (param_type)))
1812 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1813 }
1814 if (!useful_context)
1815 vec_free (args->polymorphic_call_contexts);
1816 }
1817
1818 /* Compute jump functions for all edges - both direct and indirect - outgoing
1819 from BB. */
1820
1821 static void
1822 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1823 {
1824 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1825 int i;
1826 struct cgraph_edge *cs;
1827
1828 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1829 {
1830 struct cgraph_node *callee = cs->callee;
1831
1832 if (callee)
1833 {
1834 callee->ultimate_alias_target ();
1835 /* We do not need to bother analyzing calls to unknown functions
1836 unless they may become known during lto/whopr. */
1837 if (!callee->definition && !flag_lto)
1838 continue;
1839 }
1840 ipa_compute_jump_functions_for_edge (fbi, cs);
1841 }
1842 }
1843
1844 /* If STMT looks like a statement loading a value from a member pointer formal
1845 parameter, return that parameter and store the offset of the field to
1846 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1847 might be clobbered). If USE_DELTA, then we look for a use of the delta
1848 field rather than the pfn. */
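/* For illustration only, since the exact layout is target-dependent: the
   C++ front end lowers a pointer to member function to a record along the
   lines of

     struct ptrmemfunc
     {
       void (*__pfn) (void);
       long __delta;
     };

   where __pfn is the function address (or a vtable index flagged in its low
   bit) and __delta is the adjustment applied to the this pointer.  A load
   of f.__pfn from a PARM_DECL f therefore matches this pattern with
   USE_DELTA false, and a load of f.__delta matches with USE_DELTA true. */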
1849
1850 static tree
1851 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1852 HOST_WIDE_INT *offset_p)
1853 {
1854 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1855
1856 if (!gimple_assign_single_p (stmt))
1857 return NULL_TREE;
1858
1859 rhs = gimple_assign_rhs1 (stmt);
1860 if (TREE_CODE (rhs) == COMPONENT_REF)
1861 {
1862 ref_field = TREE_OPERAND (rhs, 1);
1863 rhs = TREE_OPERAND (rhs, 0);
1864 }
1865 else
1866 ref_field = NULL_TREE;
1867 if (TREE_CODE (rhs) != MEM_REF)
1868 return NULL_TREE;
1869 rec = TREE_OPERAND (rhs, 0);
1870 if (TREE_CODE (rec) != ADDR_EXPR)
1871 return NULL_TREE;
1872 rec = TREE_OPERAND (rec, 0);
1873 if (TREE_CODE (rec) != PARM_DECL
1874 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1875 return NULL_TREE;
1876 ref_offset = TREE_OPERAND (rhs, 1);
1877
1878 if (use_delta)
1879 fld = delta_field;
1880 else
1881 fld = ptr_field;
1882 if (offset_p)
1883 *offset_p = int_bit_position (fld);
1884
1885 if (ref_field)
1886 {
1887 if (integer_nonzerop (ref_offset))
1888 return NULL_TREE;
1889 return ref_field == fld ? rec : NULL_TREE;
1890 }
1891 else
1892 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1893 : NULL_TREE;
1894 }
1895
1896 /* Returns true iff T is an SSA_NAME defined by a statement. */
1897
1898 static bool
1899 ipa_is_ssa_with_stmt_def (tree t)
1900 {
1901 if (TREE_CODE (t) == SSA_NAME
1902 && !SSA_NAME_IS_DEFAULT_DEF (t))
1903 return true;
1904 else
1905 return false;
1906 }
1907
1908 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1909 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1910 indirect call graph edge. */
1911
1912 static struct cgraph_edge *
1913 ipa_note_param_call (struct cgraph_node *node, int param_index,
1914 gcall *stmt)
1915 {
1916 struct cgraph_edge *cs;
1917
1918 cs = node->get_edge (stmt);
1919 cs->indirect_info->param_index = param_index;
1920 cs->indirect_info->agg_contents = 0;
1921 cs->indirect_info->member_ptr = 0;
1922 return cs;
1923 }
1924
1925 /* Analyze the CALL, whose called address is TARGET, and examine uses of
1926 formal parameters of the caller FBI->node (described by FBI->info, which
1927 holds intermediate information about each formal parameter). Currently it checks
1928 whether the call calls a pointer that is a formal parameter and if so, the
1929 parameter is marked with the called flag and an indirect call graph edge
1930 describing the call is created. This is very simple for ordinary pointers
1931 represented in SSA but not-so-nice when it comes to member pointers. The
1932 ugly part of this function does nothing more than trying to match the
1933 pattern of such a call. An example of such a pattern is the gimple dump
1934 below, the call is on the last line:
1935
1936 <bb 2>:
1937 f$__delta_5 = f.__delta;
1938 f$__pfn_24 = f.__pfn;
1939
1940 or
1941 <bb 2>:
1942 f$__delta_5 = MEM[(struct *)&f];
1943 f$__pfn_24 = MEM[(struct *)&f + 4B];
1944
1945 and a few lines below:
1946
1947 <bb 5>
1948 D.2496_3 = (int) f$__pfn_24;
1949 D.2497_4 = D.2496_3 & 1;
1950 if (D.2497_4 != 0)
1951 goto <bb 3>;
1952 else
1953 goto <bb 4>;
1954
1955 <bb 6>:
1956 D.2500_7 = (unsigned int) f$__delta_5;
1957 D.2501_8 = &S + D.2500_7;
1958 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1959 D.2503_10 = *D.2502_9;
1960 D.2504_12 = f$__pfn_24 + -1;
1961 D.2505_13 = (unsigned int) D.2504_12;
1962 D.2506_14 = D.2503_10 + D.2505_13;
1963 D.2507_15 = *D.2506_14;
1964 iftmp.11_16 = (String:: *) D.2507_15;
1965
1966 <bb 7>:
1967 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1968 D.2500_19 = (unsigned int) f$__delta_5;
1969 D.2508_20 = &S + D.2500_19;
1970 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1971
1972 Such patterns are results of simple calls to a member pointer:
1973
1974 int doprinting (int (MyString::* f)(int) const)
1975 {
1976 MyString S ("somestring");
1977
1978 return (S.*f)(4);
1979 }
1980
1981 Moreover, the function also looks for called pointers loaded from aggregates
1982 passed by value or reference. */
1983
1984 static void
1985 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1986 tree target)
1987 {
1988 struct ipa_node_params *info = fbi->info;
1989 HOST_WIDE_INT offset;
1990 bool by_ref;
1991
1992 if (SSA_NAME_IS_DEFAULT_DEF (target))
1993 {
1994 tree var = SSA_NAME_VAR (target);
1995 int index = ipa_get_param_decl_index (info, var);
1996 if (index >= 0)
1997 ipa_note_param_call (fbi->node, index, call);
1998 return;
1999 }
2000
2001 int index;
2002 gimple def = SSA_NAME_DEF_STMT (target);
2003 if (gimple_assign_single_p (def)
2004 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2005 gimple_assign_rhs1 (def), &index, &offset,
2006 NULL, &by_ref))
2007 {
2008 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2009 cs->indirect_info->offset = offset;
2010 cs->indirect_info->agg_contents = 1;
2011 cs->indirect_info->by_ref = by_ref;
2012 return;
2013 }
2014
2015 /* Now we need to try to match the complex pattern of calling a member
2016 pointer. */
2017 if (gimple_code (def) != GIMPLE_PHI
2018 || gimple_phi_num_args (def) != 2
2019 || !POINTER_TYPE_P (TREE_TYPE (target))
2020 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2021 return;
2022
2023 /* First, we need to check whether one of these is a load from a member
2024 pointer that is a parameter to this function. */
2025 tree n1 = PHI_ARG_DEF (def, 0);
2026 tree n2 = PHI_ARG_DEF (def, 1);
2027 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2028 return;
2029 gimple d1 = SSA_NAME_DEF_STMT (n1);
2030 gimple d2 = SSA_NAME_DEF_STMT (n2);
2031
2032 tree rec;
2033 basic_block bb, virt_bb;
2034 basic_block join = gimple_bb (def);
2035 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2036 {
2037 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2038 return;
2039
2040 bb = EDGE_PRED (join, 0)->src;
2041 virt_bb = gimple_bb (d2);
2042 }
2043 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2044 {
2045 bb = EDGE_PRED (join, 1)->src;
2046 virt_bb = gimple_bb (d1);
2047 }
2048 else
2049 return;
2050
2051 /* Second, we need to check that the basic blocks are laid out in the way
2052 corresponding to the pattern. */
2053
2054 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2055 || single_pred (virt_bb) != bb
2056 || single_succ (virt_bb) != join)
2057 return;
2058
2059 /* Third, let's see that the branching is done depending on the least
2060 significant bit of the pfn. */
2061
2062 gimple branch = last_stmt (bb);
2063 if (!branch || gimple_code (branch) != GIMPLE_COND)
2064 return;
2065
2066 if ((gimple_cond_code (branch) != NE_EXPR
2067 && gimple_cond_code (branch) != EQ_EXPR)
2068 || !integer_zerop (gimple_cond_rhs (branch)))
2069 return;
2070
2071 tree cond = gimple_cond_lhs (branch);
2072 if (!ipa_is_ssa_with_stmt_def (cond))
2073 return;
2074
2075 def = SSA_NAME_DEF_STMT (cond);
2076 if (!is_gimple_assign (def)
2077 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2078 || !integer_onep (gimple_assign_rhs2 (def)))
2079 return;
2080
2081 cond = gimple_assign_rhs1 (def);
2082 if (!ipa_is_ssa_with_stmt_def (cond))
2083 return;
2084
2085 def = SSA_NAME_DEF_STMT (cond);
2086
2087 if (is_gimple_assign (def)
2088 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2089 {
2090 cond = gimple_assign_rhs1 (def);
2091 if (!ipa_is_ssa_with_stmt_def (cond))
2092 return;
2093 def = SSA_NAME_DEF_STMT (cond);
2094 }
2095
2096 tree rec2;
2097 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2098 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2099 == ptrmemfunc_vbit_in_delta),
2100 NULL);
2101 if (rec != rec2)
2102 return;
2103
2104 index = ipa_get_param_decl_index (info, rec);
2105 if (index >= 0
2106 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2107 {
2108 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2109 cs->indirect_info->offset = offset;
2110 cs->indirect_info->agg_contents = 1;
2111 cs->indirect_info->member_ptr = 1;
2112 }
2113
2114 return;
2115 }
2116
2117 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2118 object referenced in the expression is a formal parameter of the caller
2119 FBI->node (described by FBI->info), create a call note for the
2120 statement. */
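/* A hedged illustration with hypothetical C++ source:

     struct A { virtual int foo (); };
     int call (A *a) { return a->foo (); }

   Here the object of the OBJ_TYPE_REF is the default-definition SSA name of
   the parameter a, so the statement is noted as a polymorphic call through
   parameter 0 with ancestor offset 0. */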
2121
2122 static void
2123 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2124 gcall *call, tree target)
2125 {
2126 tree obj = OBJ_TYPE_REF_OBJECT (target);
2127 int index;
2128 HOST_WIDE_INT anc_offset;
2129
2130 if (!flag_devirtualize)
2131 return;
2132
2133 if (TREE_CODE (obj) != SSA_NAME)
2134 return;
2135
2136 struct ipa_node_params *info = fbi->info;
2137 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2138 {
2139 struct ipa_jump_func jfunc;
2140 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2141 return;
2142
2143 anc_offset = 0;
2144 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2145 gcc_assert (index >= 0);
2146 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2147 call, &jfunc))
2148 return;
2149 }
2150 else
2151 {
2152 struct ipa_jump_func jfunc;
2153 gimple stmt = SSA_NAME_DEF_STMT (obj);
2154 tree expr;
2155
2156 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2157 if (!expr)
2158 return;
2159 index = ipa_get_param_decl_index (info,
2160 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2161 gcc_assert (index >= 0);
2162 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2163 call, &jfunc, anc_offset))
2164 return;
2165 }
2166
2167 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2168 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2169 ii->offset = anc_offset;
2170 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2171 ii->otr_type = obj_type_ref_class (target);
2172 ii->polymorphic = 1;
2173 }
2174
2175 /* Analyze a call statement CALL to determine whether and how it utilizes
2176 formal parameters of the caller, described by FBI->info, which also holds
2177 intermediate information about each formal parameter. */
2178
2179 static void
2180 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2181 {
2182 tree target = gimple_call_fn (call);
2183
2184 if (!target
2185 || (TREE_CODE (target) != SSA_NAME
2186 && !virtual_method_call_p (target)))
2187 return;
2188
2189 struct cgraph_edge *cs = fbi->node->get_edge (call);
2190 /* If we previously turned the call into a direct call, there is
2191 no need to analyze. */
2192 if (cs && !cs->indirect_unknown_callee)
2193 return;
2194
2195 if (cs->indirect_info->polymorphic && flag_devirtualize)
2196 {
2197 tree instance;
2198 tree target = gimple_call_fn (call);
2199 ipa_polymorphic_call_context context (current_function_decl,
2200 target, call, &instance);
2201
2202 gcc_checking_assert (cs->indirect_info->otr_type
2203 == obj_type_ref_class (target));
2204 gcc_checking_assert (cs->indirect_info->otr_token
2205 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2206
2207 cs->indirect_info->vptr_changed
2208 = !context.get_dynamic_type (instance,
2209 OBJ_TYPE_REF_OBJECT (target),
2210 obj_type_ref_class (target), call);
2211 cs->indirect_info->context = context;
2212 }
2213
2214 if (TREE_CODE (target) == SSA_NAME)
2215 ipa_analyze_indirect_call_uses (fbi, call, target);
2216 else if (virtual_method_call_p (target))
2217 ipa_analyze_virtual_call_uses (fbi, call, target);
2218 }
2219
2220
2221 /* Analyze the call statement STMT with respect to formal parameters (described
2222 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2223 formal parameters are called. */
2224
2225 static void
2226 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2227 {
2228 if (is_gimple_call (stmt))
2229 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2230 }
2231
2232 /* Callback of walk_stmt_load_store_addr_ops, used here for loads, stores
2233 and address operands alike. If OP is a parameter declaration, mark it as
2234 used in the info structure passed in DATA. */
2235
2236 static bool
2237 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2238 {
2239 struct ipa_node_params *info = (struct ipa_node_params *) data;
2240
2241 op = get_base_address (op);
2242 if (op
2243 && TREE_CODE (op) == PARM_DECL)
2244 {
2245 int index = ipa_get_param_decl_index (info, op);
2246 gcc_assert (index >= 0);
2247 ipa_set_param_used (info, index, true);
2248 }
2249
2250 return false;
2251 }
2252
2253 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2254 the findings in various structures of the associated ipa_node_params
2255 structure, such as parameter flags, notes etc. FBI holds various data about
2256 the function being analyzed. */
2257
2258 static void
2259 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2260 {
2261 gimple_stmt_iterator gsi;
2262 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2263 {
2264 gimple stmt = gsi_stmt (gsi);
2265
2266 if (is_gimple_debug (stmt))
2267 continue;
2268
2269 ipa_analyze_stmt_uses (fbi, stmt);
2270 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2271 visit_ref_for_mod_analysis,
2272 visit_ref_for_mod_analysis,
2273 visit_ref_for_mod_analysis);
2274 }
2275 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2276 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2277 visit_ref_for_mod_analysis,
2278 visit_ref_for_mod_analysis,
2279 visit_ref_for_mod_analysis);
2280 }
2281
2282 /* Calculate controlled uses of parameters of NODE. */
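/* A hedged illustration with hypothetical functions g and h:

     void f (void (*cb) (void))
     {
       g (cb);
       h (cb);
     }

   Both uses of cb appear in call statements, so its controlled-uses count
   is 2; storing cb into memory or comparing it would instead force
   IPA_UNDESCRIBED_USE. */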
2283
2284 static void
2285 ipa_analyze_controlled_uses (struct cgraph_node *node)
2286 {
2287 struct ipa_node_params *info = IPA_NODE_REF (node);
2288
2289 for (int i = 0; i < ipa_get_param_count (info); i++)
2290 {
2291 tree parm = ipa_get_param (info, i);
2292 int controlled_uses = 0;
2293
2294 /* For SSA regs see if the parameter is used. For non-SSA parameters
2295 we compute the flag during modification analysis. */
2296 if (is_gimple_reg (parm))
2297 {
2298 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2299 parm);
2300 if (ddef && !has_zero_uses (ddef))
2301 {
2302 imm_use_iterator imm_iter;
2303 use_operand_p use_p;
2304
2305 ipa_set_param_used (info, i, true);
2306 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2307 if (!is_gimple_call (USE_STMT (use_p)))
2308 {
2309 if (!is_gimple_debug (USE_STMT (use_p)))
2310 {
2311 controlled_uses = IPA_UNDESCRIBED_USE;
2312 break;
2313 }
2314 }
2315 else
2316 controlled_uses++;
2317 }
2318 else
2319 controlled_uses = 0;
2320 }
2321 else
2322 controlled_uses = IPA_UNDESCRIBED_USE;
2323 ipa_set_controlled_uses (info, i, controlled_uses);
2324 }
2325 }
2326
2327 /* Release the vectors stored in BI. */
2328
2329 static void
2330 free_ipa_bb_info (struct ipa_bb_info *bi)
2331 {
2332 bi->cg_edges.release ();
2333 bi->param_aa_statuses.release ();
2334 }
2335
2336 /* Dominator walker driving the analysis. */
2337
2338 class analysis_dom_walker : public dom_walker
2339 {
2340 public:
2341 analysis_dom_walker (struct func_body_info *fbi)
2342 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2343
2344 virtual void before_dom_children (basic_block);
2345
2346 private:
2347 struct func_body_info *m_fbi;
2348 };
2349
2350 void
2351 analysis_dom_walker::before_dom_children (basic_block bb)
2352 {
2353 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2354 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2355 }
2356
2357 /* Initialize the array describing properties of formal parameters
2358 of NODE, analyze their uses and compute jump functions associated
2359 with actual arguments of calls from within NODE. */
2360
2361 void
2362 ipa_analyze_node (struct cgraph_node *node)
2363 {
2364 struct func_body_info fbi;
2365 struct ipa_node_params *info;
2366
2367 ipa_check_create_node_params ();
2368 ipa_check_create_edge_args ();
2369 info = IPA_NODE_REF (node);
2370
2371 if (info->analysis_done)
2372 return;
2373 info->analysis_done = 1;
2374
2375 if (ipa_func_spec_opts_forbid_analysis_p (node))
2376 {
2377 for (int i = 0; i < ipa_get_param_count (info); i++)
2378 {
2379 ipa_set_param_used (info, i, true);
2380 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2381 }
2382 return;
2383 }
2384
2385 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2386 push_cfun (func);
2387 calculate_dominance_info (CDI_DOMINATORS);
2388 ipa_initialize_node_params (node);
2389 ipa_analyze_controlled_uses (node);
2390
2391 fbi.node = node;
2392 fbi.info = IPA_NODE_REF (node);
2393 fbi.bb_infos = vNULL;
2394 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2395 fbi.param_count = ipa_get_param_count (info);
2396 fbi.aa_walked = 0;
2397
2398 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2399 {
2400 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2401 bi->cg_edges.safe_push (cs);
2402 }
2403
2404 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2405 {
2406 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2407 bi->cg_edges.safe_push (cs);
2408 }
2409
2410 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2411
2412 int i;
2413 struct ipa_bb_info *bi;
2414 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2415 free_ipa_bb_info (bi);
2416 fbi.bb_infos.release ();
2417 free_dominance_info (CDI_DOMINATORS);
2418 pop_cfun ();
2419 }
2420
2421 /* Update the jump functions associated with call graph edge E when the call
2422 graph edge CS is being inlined, assuming that E->caller is already (possibly
2423 indirectly) inlined into CS->callee and that E has not been inlined. */
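/* A sketch of the composition performed here, with hypothetical parameter
   numbers: suppose B has been inlined into A via CS, that E runs from the
   inlined body of B to C, that B forwarded its parameter 0 unchanged to C
   (a simple pass-through), and that CS passed A's parameter 2 as that
   argument of B.  The two pass-throughs then compose into a single
   pass-through of A's parameter 2, describing C's argument directly in
   terms of A.  Combinations the code below cannot interpret degrade to
   IPA_JF_UNKNOWN. */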
2424
2425 static void
2426 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2427 struct cgraph_edge *e)
2428 {
2429 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2430 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2431 int count = ipa_get_cs_argument_count (args);
2432 int i;
2433
2434 for (i = 0; i < count; i++)
2435 {
2436 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2437 struct ipa_polymorphic_call_context *dst_ctx
2438 = ipa_get_ith_polymorhic_call_context (args, i);
2439
2440 if (dst->type == IPA_JF_ANCESTOR)
2441 {
2442 struct ipa_jump_func *src;
2443 int dst_fid = dst->value.ancestor.formal_id;
2444 struct ipa_polymorphic_call_context *src_ctx
2445 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2446
2447 /* A variable number of arguments can cause havoc if we try to access
2448 an argument that does not exist on the inlined edge. So make sure
2449 we don't. */
2450 if (dst_fid >= ipa_get_cs_argument_count (top))
2451 {
2452 ipa_set_jf_unknown (dst);
2453 continue;
2454 }
2455
2456 src = ipa_get_ith_jump_func (top, dst_fid);
2457
2458 if (src_ctx && !src_ctx->useless_p ())
2459 {
2460 struct ipa_polymorphic_call_context ctx = *src_ctx;
2461
2462 /* TODO: Make type preserved safe WRT contexts. */
2463 if (!ipa_get_jf_ancestor_type_preserved (dst))
2464 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2465 ctx.offset_by (dst->value.ancestor.offset);
2466 if (!ctx.useless_p ())
2467 {
2468 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2469 count);
2470 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2471 }
2472 dst_ctx->combine_with (ctx);
2473 }
2474
2475 if (src->agg.items
2476 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2477 {
2478 struct ipa_agg_jf_item *item;
2479 int j;
2480
2481 /* Currently we do not produce clobber aggregate jump functions,
2482 replace with merging when we do. */
2483 gcc_assert (!dst->agg.items);
2484
2485 dst->agg.items = vec_safe_copy (src->agg.items);
2486 dst->agg.by_ref = src->agg.by_ref;
2487 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2488 item->offset -= dst->value.ancestor.offset;
2489 }
2490
2491 if (src->type == IPA_JF_PASS_THROUGH
2492 && src->value.pass_through.operation == NOP_EXPR)
2493 {
2494 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2495 dst->value.ancestor.agg_preserved &=
2496 src->value.pass_through.agg_preserved;
2497 }
2498 else if (src->type == IPA_JF_ANCESTOR)
2499 {
2500 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2501 dst->value.ancestor.offset += src->value.ancestor.offset;
2502 dst->value.ancestor.agg_preserved &=
2503 src->value.ancestor.agg_preserved;
2504 }
2505 else
2506 ipa_set_jf_unknown (dst);
2507 }
2508 else if (dst->type == IPA_JF_PASS_THROUGH)
2509 {
2510 struct ipa_jump_func *src;
2511 /* We must check range due to calls with variable number of arguments
2512 and we cannot combine jump functions with operations. */
2513 if (dst->value.pass_through.operation == NOP_EXPR
2514 && (dst->value.pass_through.formal_id
2515 < ipa_get_cs_argument_count (top)))
2516 {
2517 int dst_fid = dst->value.pass_through.formal_id;
2518 src = ipa_get_ith_jump_func (top, dst_fid);
2519 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2520 struct ipa_polymorphic_call_context *src_ctx
2521 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2522
2523 if (src_ctx && !src_ctx->useless_p ())
2524 {
2525 struct ipa_polymorphic_call_context ctx = *src_ctx;
2526
2527 /* TODO: Make type preserved safe WRT contexts. */
2528 if (!ipa_get_jf_pass_through_type_preserved (dst))
2529 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2530 if (!ctx.useless_p ())
2531 {
2532 if (!dst_ctx)
2533 {
2534 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2535 count);
2536 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2537 }
2538 dst_ctx->combine_with (ctx);
2539 }
2540 }
2541 switch (src->type)
2542 {
2543 case IPA_JF_UNKNOWN:
2544 ipa_set_jf_unknown (dst);
2545 break;
2546 case IPA_JF_CONST:
2547 ipa_set_jf_cst_copy (dst, src);
2548 break;
2549
2550 case IPA_JF_PASS_THROUGH:
2551 {
2552 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2553 enum tree_code operation;
2554 operation = ipa_get_jf_pass_through_operation (src);
2555
2556 if (operation == NOP_EXPR)
2557 {
2558 bool agg_p;
2559 agg_p = dst_agg_p
2560 && ipa_get_jf_pass_through_agg_preserved (src);
2561 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2562 }
2563 else
2564 {
2565 tree operand = ipa_get_jf_pass_through_operand (src);
2566 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2567 operation);
2568 }
2569 break;
2570 }
2571 case IPA_JF_ANCESTOR:
2572 {
2573 bool agg_p;
2574 agg_p = dst_agg_p
2575 && ipa_get_jf_ancestor_agg_preserved (src);
2576 ipa_set_ancestor_jf (dst,
2577 ipa_get_jf_ancestor_offset (src),
2578 ipa_get_jf_ancestor_formal_id (src),
2579 agg_p);
2580 break;
2581 }
2582 default:
2583 gcc_unreachable ();
2584 }
2585
2586 if (src->agg.items
2587 && (dst_agg_p || !src->agg.by_ref))
2588 {
2589 /* Currently we do not produce clobber aggregate jump
2590 functions, replace with merging when we do. */
2591 gcc_assert (!dst->agg.items);
2592
2593 dst->agg.by_ref = src->agg.by_ref;
2594 dst->agg.items = vec_safe_copy (src->agg.items);
2595 }
2596 }
2597 else
2598 ipa_set_jf_unknown (dst);
2599 }
2600 }
2601 }
2602
2603 /* If TARGET is an ADDR_EXPR of a function declaration, make it the
2604 (speculative, if SPECULATIVE is set) destination of an indirect edge IE
2605 and return the edge. Otherwise, return NULL. */
2606
2607 struct cgraph_edge *
2608 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2609 bool speculative)
2610 {
2611 struct cgraph_node *callee;
2612 struct inline_edge_summary *es = inline_edge_summary (ie);
2613 bool unreachable = false;
2614
2615 if (TREE_CODE (target) == ADDR_EXPR)
2616 target = TREE_OPERAND (target, 0);
2617 if (TREE_CODE (target) != FUNCTION_DECL)
2618 {
2619 target = canonicalize_constructor_val (target, NULL);
2620 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2621 {
2622 /* Member pointer call that goes through a VMT lookup. */
2623 if (ie->indirect_info->member_ptr
2624 /* Or if target is not an invariant expression and we do not
2625 know whether it will evaluate to a function at runtime.
2626 This can happen when folding through &VAR, where &VAR
2627 is IP invariant, but VAR itself is not.
2628
2629 TODO: Revisit this when GCC 5 is branched. It seems that
2630 the member_ptr check is not needed and that we might try to fold
2631 the expression and see if VAR is readonly. */
2632 || !is_gimple_ip_invariant (target))
2633 {
2634 if (dump_enabled_p ())
2635 {
2636 location_t loc = gimple_location_safe (ie->call_stmt);
2637 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2638 "discovered direct call non-invariant "
2639 "%s/%i\n",
2640 ie->caller->name (), ie->caller->order);
2641 }
2642 return NULL;
2643 }
2644
2645
2646 if (dump_enabled_p ())
2647 {
2648 location_t loc = gimple_location_safe (ie->call_stmt);
2649 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2650 "discovered direct call to non-function in %s/%i, "
2651 "making it __builtin_unreachable\n",
2652 ie->caller->name (), ie->caller->order);
2653 }
2654
2655 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2656 callee = cgraph_node::get_create (target);
2657 unreachable = true;
2658 }
2659 else
2660 callee = cgraph_node::get (target);
2661 }
2662 else
2663 callee = cgraph_node::get (target);
2664
2665 /* Because may-edges are not explicitly represented and the vtable may be
2666 external, we may create the first reference to the object in the unit. */
2667 if (!callee || callee->global.inlined_to)
2668 {
2669
2670 /* We had better ensure we can refer to it.
2671 In the case of static functions we are out of luck, since we have
2672 already removed the body. In the case of public functions we may or
2673 may not introduce the reference. */
2674 if (!canonicalize_constructor_val (target, NULL)
2675 || !TREE_PUBLIC (target))
2676 {
2677 if (dump_file)
2678 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2679 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2680 xstrdup_for_dump (ie->caller->name ()),
2681 ie->caller->order,
2682 xstrdup_for_dump (ie->callee->name ()),
2683 ie->callee->order);
2684 return NULL;
2685 }
2686 callee = cgraph_node::get_create (target);
2687 }
2688
2689 /* If the edge is already speculative, check it against the new target. */
2690 if (speculative && ie->speculative)
2691 {
2692 struct cgraph_edge *e2;
2693 struct ipa_ref *ref;
2694 ie->speculative_call_info (e2, ie, ref);
2695 if (e2->callee->ultimate_alias_target ()
2696 != callee->ultimate_alias_target ())
2697 {
2698 if (dump_file)
2699 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2700 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2701 xstrdup_for_dump (ie->caller->name ()),
2702 ie->caller->order,
2703 xstrdup_for_dump (callee->name ()),
2704 callee->order,
2705 xstrdup_for_dump (e2->callee->name ()),
2706 e2->callee->order);
2707 }
2708 else
2709 {
2710 if (dump_file)
2711 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2712 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2713 xstrdup_for_dump (ie->caller->name ()),
2714 ie->caller->order,
2715 xstrdup_for_dump (callee->name ()),
2716 callee->order);
2717 }
2718 return NULL;
2719 }
2720
2721 if (!dbg_cnt (devirt))
2722 return NULL;
2723
2724 ipa_check_create_node_params ();
2725
2726 /* We cannot make edges to inline clones. It is a bug if someone removed
2727 the cgraph node too early. */
2728 gcc_assert (!callee->global.inlined_to);
2729
2730 if (dump_file && !unreachable)
2731 {
2732 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2733 "(%s/%i -> %s/%i), for stmt ",
2734 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2735 speculative ? "speculative" : "known",
2736 xstrdup_for_dump (ie->caller->name ()),
2737 ie->caller->order,
2738 xstrdup_for_dump (callee->name ()),
2739 callee->order);
2740 if (ie->call_stmt)
2741 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2742 else
2743 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2744 }
2745 if (dump_enabled_p ())
2746 {
2747 location_t loc = gimple_location_safe (ie->call_stmt);
2748
2749 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2750 "converting indirect call in %s to direct call to %s\n",
2751 ie->caller->name (), callee->name ());
2752 }
2753 if (!speculative)
2754 {
2755 struct cgraph_edge *orig = ie;
2756 ie = ie->make_direct (callee);
2757 /* If we resolved a speculative edge, the cost is already up to date
2758 for the direct call (adjusted by inline_edge_duplication_hook). */
2759 if (ie == orig)
2760 {
2761 es = inline_edge_summary (ie);
2762 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2763 - eni_size_weights.call_cost);
2764 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2765 - eni_time_weights.call_cost);
2766 }
2767 }
2768 else
2769 {
2770 if (!callee->can_be_discarded_p ())
2771 {
2772 cgraph_node *alias;
2773 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2774 if (alias)
2775 callee = alias;
2776 }
2777 /* make_speculative will update ie's cost to direct call cost. */
2778 ie = ie->make_speculative
2779 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2780 }
2781
2782 return ie;
2783 }
2784
2785 /* Retrieve the value from aggregate jump function AGG for the given OFFSET,
2786 or return NULL if there is none. BY_REF specifies whether the value has
2787 to be passed by reference or by value. */
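/* A hedged illustration with made-up numbers: for a caller doing

     struct S { int a; int b; } s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the aggregate jump function of the argument has by_ref set and items
   such as { offset 0, value 1 } and { offset 32, value 2 } (bit offsets,
   assuming a 32-bit int), so a query for offset 32 with BY_REF true yields
   the constant 2. */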
2788
2789 tree
2790 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2791 HOST_WIDE_INT offset, bool by_ref)
2792 {
2793 struct ipa_agg_jf_item *item;
2794 int i;
2795
2796 if (by_ref != agg->by_ref)
2797 return NULL;
2798
2799 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2800 if (item->offset == offset)
2801 {
2802 /* Currently we do not have clobber values, return NULL for them once
2803 we do. */
2804 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2805 return item->value;
2806 }
2807 return NULL;
2808 }
2809
2810 /* Remove a reference to SYMBOL from the list of references of a node given by
2811 reference description RDESC. Return true if the reference has been
2812 successfully found and removed. */
2813
2814 static bool
2815 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2816 {
2817 struct ipa_ref *to_del;
2818 struct cgraph_edge *origin;
2819
2820 origin = rdesc->cs;
2821 if (!origin)
2822 return false;
2823 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2824 origin->lto_stmt_uid);
2825 if (!to_del)
2826 return false;
2827
2828 to_del->remove_reference ();
2829 if (dump_file)
2830 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2831 xstrdup_for_dump (origin->caller->name ()),
2832 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2833 return true;
2834 }
2835
2836 /* If JFUNC has a reference description with refcount different from
2837 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2838 NULL. JFUNC must be a constant jump function. */
2839
2840 static struct ipa_cst_ref_desc *
2841 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2842 {
2843 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2844 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2845 return rdesc;
2846 else
2847 return NULL;
2848 }
2849
2850 /* If the value of constant jump function JFUNC is an address of a function
2851 declaration, return the associated call graph node. Otherwise return
2852 NULL. */
2853
2854 static cgraph_node *
2855 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2856 {
2857 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2858 tree cst = ipa_get_jf_constant (jfunc);
2859 if (TREE_CODE (cst) != ADDR_EXPR
2860 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2861 return NULL;
2862
2863 return cgraph_node::get (TREE_OPERAND (cst, 0));
2864 }
2865
2866
2867 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2868 refcount and if it hits zero, remove the reference to the described symbol
2869 from the caller of the edge specified in the rdesc. Return false if either
2870 the symbol or the reference could not be found, otherwise return true. */
2871
2872 static bool
2873 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2874 {
2875 struct ipa_cst_ref_desc *rdesc;
2876 if (jfunc->type == IPA_JF_CONST
2877 && (rdesc = jfunc_rdesc_usable (jfunc))
2878 && --rdesc->refcount == 0)
2879 {
2880 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2881 if (!symbol)
2882 return false;
2883
2884 return remove_described_reference (symbol, rdesc);
2885 }
2886 return true;
2887 }
2888
2889 /* Try to find a destination for indirect edge IE that corresponds to a simple
2890 call or a call of a member function pointer and where the destination is a
2891 pointer formal parameter described by jump function JFUNC. If it can be
2892 determined, return the newly direct edge, otherwise return NULL.
2893 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2894
2895 static struct cgraph_edge *
2896 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2897 struct ipa_jump_func *jfunc,
2898 struct ipa_node_params *new_root_info)
2899 {
2900 struct cgraph_edge *cs;
2901 tree target;
2902 bool agg_contents = ie->indirect_info->agg_contents;
2903
2904 if (ie->indirect_info->agg_contents)
2905 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2906 ie->indirect_info->offset,
2907 ie->indirect_info->by_ref);
2908 else
2909 target = ipa_value_from_jfunc (new_root_info, jfunc);
2910 if (!target)
2911 return NULL;
2912 cs = ipa_make_edge_direct_to_target (ie, target);
2913
2914 if (cs && !agg_contents)
2915 {
2916 bool ok;
2917 gcc_checking_assert (cs->callee
2918 && (cs != ie
2919 || jfunc->type != IPA_JF_CONST
2920 || !cgraph_node_for_jfunc (jfunc)
2921 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2922 ok = try_decrement_rdesc_refcount (jfunc);
2923 gcc_checking_assert (ok);
2924 }
2925
2926 return cs;
2927 }
2928
2929 /* Return the target to be used in cases of impossible devirtualization. IE
2930 and target (the latter can be NULL) are dumped when dumping is enabled. */
2931
2932 tree
2933 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2934 {
2935 if (dump_file)
2936 {
2937 if (target)
2938 fprintf (dump_file,
2939 "Type inconsistent devirtualization: %s/%i->%s\n",
2940 ie->caller->name (), ie->caller->order,
2941 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2942 else
2943 fprintf (dump_file,
2944 "No devirtualization target in %s/%i\n",
2945 ie->caller->name (), ie->caller->order);
2946 }
2947 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2948 cgraph_node::get_create (new_target);
2949 return new_target;
2950 }
2951
2952 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2953 call based on a formal parameter which is described by jump function JFUNC
2954 and if it can be determined, make it direct and return the direct edge.
2955 Otherwise, return NULL. CTX describes the polymorphic context that the
2956 parameter the call is based on brings along with it. */
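/* A hedged illustration: if the aggregate jump function records that the
   memory at the vptr offset of the object holds &_ZTV1A + 16 (the vtable
   of a hypothetical class A, offset past its header), then
   vtable_pointer_value_to_vtable recovers the vtable and the offset 16
   within it, and gimple_get_virt_method_for_vtable uses the OTR token to
   pick the virtual method stored there. */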
2957
2958 static struct cgraph_edge *
2959 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2960 struct ipa_jump_func *jfunc,
2961 struct ipa_polymorphic_call_context ctx)
2962 {
2963 tree target = NULL;
2964 bool speculative = false;
2965
2966 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2967 return NULL;
2968
2969 gcc_assert (!ie->indirect_info->by_ref);
2970
2971 /* Try to do lookup via known virtual table pointer value. */
2972 if (!ie->indirect_info->vptr_changed
2973 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2974 {
2975 tree vtable;
2976 unsigned HOST_WIDE_INT offset;
2977 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2978 ie->indirect_info->offset,
2979 true);
2980 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2981 {
2982 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2983 vtable, offset);
2984 if (t)
2985 {
2986 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2987 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2988 || !possible_polymorphic_call_target_p
2989 (ie, cgraph_node::get (t)))
2990 {
2991 /* Do not speculate builtin_unreachable, it is stupid! */
2992 if (!ie->indirect_info->vptr_changed)
2993 target = ipa_impossible_devirt_target (ie, target);
2994 }
2995 else
2996 {
2997 target = t;
2998 speculative = ie->indirect_info->vptr_changed;
2999 }
3000 }
3001 }
3002 }
3003
3004 ipa_polymorphic_call_context ie_context (ie);
3005 vec <cgraph_node *>targets;
3006 bool final;
3007
3008 ctx.offset_by (ie->indirect_info->offset);
3009 if (ie->indirect_info->vptr_changed)
3010 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3011 ie->indirect_info->otr_type);
3012 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3013 targets = possible_polymorphic_call_targets
3014 (ie->indirect_info->otr_type,
3015 ie->indirect_info->otr_token,
3016 ctx, &final);
3017 if (final && targets.length () <= 1)
3018 {
3019 speculative = false;
3020 if (targets.length () == 1)
3021 target = targets[0]->decl;
3022 else
3023 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3024 }
3025 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3026 && !ie->speculative && ie->maybe_hot_p ())
3027 {
3028 cgraph_node *n;
3029 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3030 ie->indirect_info->otr_token,
3031 ie->indirect_info->context);
3032 if (n)
3033 {
3034 target = n->decl;
3035 speculative = true;
3036 }
3037 }
3038
3039 if (target)
3040 {
3041 if (!possible_polymorphic_call_target_p
3042 (ie, cgraph_node::get_create (target)))
3043 {
3044 if (speculative)
3045 return NULL;
3046 target = ipa_impossible_devirt_target (ie, target);
3047 }
3048 return ipa_make_edge_direct_to_target (ie, target, speculative);
3049 }
3050 else
3051 return NULL;
3052 }
3053
3054 /* Update the param called notes associated with NODE when CS is being inlined,
3055 assuming NODE is (potentially indirectly) inlined into CS->callee.
3056 Moreover, if the callee is discovered to be constant, create a new cgraph
3057 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3058 unless NEW_EDGES is NULL. Return true iff new edges were created. */
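/* A hedged sketch of the re-description performed below: if the inlined
   NODE called through its parameter 1, and the jump function computed for
   that argument of CS is a simple pass-through of the caller's parameter 0,
   the surviving indirect edge is re-pointed at the caller's parameter 0.
   If the jump function is anything the code cannot interpret, param_index
   is reset to -1 and the edge can never be resolved from parameter
   values. */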
3059
3060 static bool
3061 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3062 struct cgraph_node *node,
3063 vec<cgraph_edge *> *new_edges)
3064 {
3065 struct ipa_edge_args *top;
3066 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3067 struct ipa_node_params *new_root_info;
3068 bool res = false;
3069
3070 ipa_check_create_edge_args ();
3071 top = IPA_EDGE_REF (cs);
3072 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3073 ? cs->caller->global.inlined_to
3074 : cs->caller);
3075
3076 for (ie = node->indirect_calls; ie; ie = next_ie)
3077 {
3078 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3079 struct ipa_jump_func *jfunc;
3080 int param_index;
3081 cgraph_node *spec_target = NULL;
3082
3083 next_ie = ie->next_callee;
3084
3085 if (ici->param_index == -1)
3086 continue;
3087
3088 /* We must check the range due to calls with a variable number of arguments. */
3089 if (ici->param_index >= ipa_get_cs_argument_count (top))
3090 {
3091 ici->param_index = -1;
3092 continue;
3093 }
3094
3095 param_index = ici->param_index;
3096 jfunc = ipa_get_ith_jump_func (top, param_index);
3097
3098 if (ie->speculative)
3099 {
3100 struct cgraph_edge *de;
3101 struct ipa_ref *ref;
3102 ie->speculative_call_info (de, ie, ref);
3103 spec_target = de->callee;
3104 }
3105
3106 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3107 new_direct_edge = NULL;
3108 else if (ici->polymorphic)
3109 {
3110 ipa_polymorphic_call_context ctx;
3111 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3112 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3113 }
3114 else
3115 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3116 new_root_info);
3117 /* If speculation was removed, then we need to do nothing. */
3118 if (new_direct_edge && new_direct_edge != ie
3119 && new_direct_edge->callee == spec_target)
3120 {
3121 new_direct_edge->indirect_inlining_edge = 1;
3122 top = IPA_EDGE_REF (cs);
3123 res = true;
3124 if (!new_direct_edge->speculative)
3125 continue;
3126 }
3127 else if (new_direct_edge)
3128 {
3129 new_direct_edge->indirect_inlining_edge = 1;
3130 if (new_direct_edge->call_stmt)
3131 new_direct_edge->call_stmt_cannot_inline_p
3132 = !gimple_check_call_matching_types (
3133 new_direct_edge->call_stmt,
3134 new_direct_edge->callee->decl, false);
3135 if (new_edges)
3136 {
3137 new_edges->safe_push (new_direct_edge);
3138 res = true;
3139 }
3140 top = IPA_EDGE_REF (cs);
3141 /* If a speculative edge was introduced, we still need to update
3142 the call info of the indirect edge. */
3143 if (!new_direct_edge->speculative)
3144 continue;
3145 }
3146 if (jfunc->type == IPA_JF_PASS_THROUGH
3147 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3148 {
3149 if (ici->agg_contents
3150 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3151 && !ici->polymorphic)
3152 ici->param_index = -1;
3153 else
3154 {
3155 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3156 if (ici->polymorphic
3157 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3158 ici->vptr_changed = true;
3159 }
3160 }
3161 else if (jfunc->type == IPA_JF_ANCESTOR)
3162 {
3163 if (ici->agg_contents
3164 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3165 && !ici->polymorphic)
3166 ici->param_index = -1;
3167 else
3168 {
3169 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3170 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3171 if (ici->polymorphic
3172 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3173 ici->vptr_changed = true;
3174 }
3175 }
3176 else
3177 /* Either we can find a destination for this edge now or never. */
3178 ici->param_index = -1;
3179 }
3180
3181 return res;
3182 }
3183
3184 /* Recursively traverse subtree of NODE (including node) made of inlined
3185 cgraph_edges when CS has been inlined and invoke
3186 update_indirect_edges_after_inlining on all nodes and
3187 update_jump_functions_after_inlining on all non-inlined edges that lead out
3188 of this subtree. Newly discovered indirect edges will be added to
3189 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3190 created. */
3191
3192 static bool
3193 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3194 struct cgraph_node *node,
3195 vec<cgraph_edge *> *new_edges)
3196 {
3197 struct cgraph_edge *e;
3198 bool res;
3199
3200 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3201
3202 for (e = node->callees; e; e = e->next_callee)
3203 if (!e->inline_failed)
3204 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3205 else
3206 update_jump_functions_after_inlining (cs, e);
3207 for (e = node->indirect_calls; e; e = e->next_callee)
3208 update_jump_functions_after_inlining (cs, e);
3209
3210 return res;
3211 }
3212
3213 /* Combine two controlled uses counts as done during inlining. */
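/* A worked example with made-up counts: if the new root held c = 3
   described uses of a value and the inlined callee's matching parameter
   had d = 2 described uses, one of the root's uses was the argument of the
   now-inlined call itself, so the combined count is 3 + 2 - 1 = 4.  An
   IPA_UNDESCRIBED_USE on either side poisons the result. */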
3214
3215 static int
3216 combine_controlled_uses_counters (int c, int d)
3217 {
3218 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3219 return IPA_UNDESCRIBED_USE;
3220 else
3221 return c + d - 1;
3222 }
3223
3224 /* Propagate the number of controlled uses from CS->callee to the new root
3225 of the tree of inlined nodes. */
3226
3227 static void
3228 propagate_controlled_uses (struct cgraph_edge *cs)
3229 {
3230 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3231 struct cgraph_node *new_root = cs->caller->global.inlined_to
3232 ? cs->caller->global.inlined_to : cs->caller;
3233 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3234 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3235 int count, i;
3236
3237 count = MIN (ipa_get_cs_argument_count (args),
3238 ipa_get_param_count (old_root_info));
3239 for (i = 0; i < count; i++)
3240 {
3241 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3242 struct ipa_cst_ref_desc *rdesc;
3243
3244 if (jf->type == IPA_JF_PASS_THROUGH)
3245 {
3246 int src_idx, c, d;
3247 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3248 c = ipa_get_controlled_uses (new_root_info, src_idx);
3249 d = ipa_get_controlled_uses (old_root_info, i);
3250
3251 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3252 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3253 c = combine_controlled_uses_counters (c, d);
3254 ipa_set_controlled_uses (new_root_info, src_idx, c);
3255 if (c == 0 && new_root_info->ipcp_orig_node)
3256 {
3257 struct cgraph_node *n;
3258 struct ipa_ref *ref;
3259 tree t = new_root_info->known_csts[src_idx];
3260
3261 if (t && TREE_CODE (t) == ADDR_EXPR
3262 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3263 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3264 && (ref = new_root->find_reference (n, NULL, 0)))
3265 {
3266 if (dump_file)
3267 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3268 "reference from %s/%i to %s/%i.\n",
3269 xstrdup_for_dump (new_root->name ()),
3270 new_root->order,
3271 xstrdup_for_dump (n->name ()), n->order);
3272 ref->remove_reference ();
3273 }
3274 }
3275 }
3276 else if (jf->type == IPA_JF_CONST
3277 && (rdesc = jfunc_rdesc_usable (jf)))
3278 {
3279 int d = ipa_get_controlled_uses (old_root_info, i);
3280 int c = rdesc->refcount;
3281 rdesc->refcount = combine_controlled_uses_counters (c, d);
3282 if (rdesc->refcount == 0)
3283 {
3284 tree cst = ipa_get_jf_constant (jf);
3285 struct cgraph_node *n;
3286 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3287 && TREE_CODE (TREE_OPERAND (cst, 0))
3288 == FUNCTION_DECL);
3289 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3290 if (n)
3291 {
3292 struct cgraph_node *clone;
3293 bool ok;
3294 ok = remove_described_reference (n, rdesc);
3295 gcc_checking_assert (ok);
3296
3297 clone = cs->caller;
3298 while (clone->global.inlined_to
3299 && clone != rdesc->cs->caller
3300 && IPA_NODE_REF (clone)->ipcp_orig_node)
3301 {
3302 struct ipa_ref *ref;
3303 ref = clone->find_reference (n, NULL, 0);
3304 if (ref)
3305 {
3306 if (dump_file)
3307 fprintf (dump_file, "ipa-prop: Removing "
3308 "cloning-created reference "
3309 "from %s/%i to %s/%i.\n",
3310 xstrdup_for_dump (clone->name ()),
3311 clone->order,
3312 xstrdup_for_dump (n->name ()),
3313 n->order);
3314 ref->remove_reference ();
3315 }
3316 clone = clone->callers->caller;
3317 }
3318 }
3319 }
3320 }
3321 }
3322
3323 for (i = ipa_get_param_count (old_root_info);
3324 i < ipa_get_cs_argument_count (args);
3325 i++)
3326 {
3327 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3328
3329 if (jf->type == IPA_JF_CONST)
3330 {
3331 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3332 if (rdesc)
3333 rdesc->refcount = IPA_UNDESCRIBED_USE;
3334 }
3335 else if (jf->type == IPA_JF_PASS_THROUGH)
3336 ipa_set_controlled_uses (new_root_info,
3337 jf->value.pass_through.formal_id,
3338 IPA_UNDESCRIBED_USE);
3339 }
3340 }
3341
3342 /* Update jump functions and call note functions on inlining the call site CS.
3343 CS is expected to lead to a node already cloned by
3344 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3345 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3346 created. */
3347
3348 bool
3349 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3350 vec<cgraph_edge *> *new_edges)
3351 {
3352 bool changed;
3353 /* Do nothing if the preparation phase has not been carried out yet
3354 (i.e. during early inlining). */
3355 if (!ipa_node_params_sum)
3356 return false;
3357 gcc_assert (ipa_edge_args_vector);
3358
3359 propagate_controlled_uses (cs);
3360 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3361
3362 return changed;
3363 }
3364
3365 /* Frees all dynamically allocated structures that the argument info points
3366 to. */
3367
3368 void
3369 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3370 {
3371 vec_free (args->jump_functions);
3372 memset (args, 0, sizeof (*args));
3373 }
3374
3375 /* Free all ipa_edge structures. */
3376
3377 void
3378 ipa_free_all_edge_args (void)
3379 {
3380 int i;
3381 struct ipa_edge_args *args;
3382
3383 if (!ipa_edge_args_vector)
3384 return;
3385
3386 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3387 ipa_free_edge_args_substructures (args);
3388
3389 vec_free (ipa_edge_args_vector);
3390 }
3391
3392 /* Frees all dynamically allocated structures that the param info points
3393 to. */
3394
3395 ipa_node_params::~ipa_node_params ()
3396 {
3397 descriptors.release ();
3398 free (lattices);
3399 /* Lattice values and their sources are deallocated with their allocation
3400 pool. */
3401 known_contexts.release ();
3402
3403 lattices = NULL;
3404 ipcp_orig_node = NULL;
3405 analysis_done = 0;
3406 node_enqueued = 0;
3407 do_clone_for_all_contexts = 0;
3408 is_all_contexts_clone = 0;
3409 node_dead = 0;
3410 }
3411
3412 /* Free all ipa_node_params structures. */
3413
3414 void
3415 ipa_free_all_node_params (void)
3416 {
3417 delete ipa_node_params_sum;
3418 ipa_node_params_sum = NULL;
3419 }
3420
3421 /* Grow ipcp_transformations if necessary. */
3422
3423 void
3424 ipcp_grow_transformations_if_necessary (void)
3425 {
3426 if (vec_safe_length (ipcp_transformations)
3427 <= (unsigned) symtab->cgraph_max_uid)
3428 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3429 }
3430
3431 /* Set the aggregate replacements of NODE to be AGGVALS. */
3432
3433 void
3434 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3435 struct ipa_agg_replacement_value *aggvals)
3436 {
3437 ipcp_grow_transformations_if_necessary ();
3438 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3439 }
3440
3441 /* Hook that is called by cgraph.c when an edge is removed. */
3442
3443 static void
3444 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3445 {
3446 struct ipa_edge_args *args;
3447
3448 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3449 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3450 return;
3451
3452 args = IPA_EDGE_REF (cs);
3453 if (args->jump_functions)
3454 {
3455 struct ipa_jump_func *jf;
3456 int i;
3457 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3458 {
3459 struct ipa_cst_ref_desc *rdesc;
3460 try_decrement_rdesc_refcount (jf);
3461 if (jf->type == IPA_JF_CONST
3462 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3463 && rdesc->cs == cs)
3464 rdesc->cs = NULL;
3465 }
3466 }
3467
3468 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3469 }
3470
3471 /* Hook that is called by cgraph.c when an edge is duplicated. */
3472
3473 static void
3474 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3475 void *)
3476 {
3477 struct ipa_edge_args *old_args, *new_args;
3478 unsigned int i;
3479
3480 ipa_check_create_edge_args ();
3481
3482 old_args = IPA_EDGE_REF (src);
3483 new_args = IPA_EDGE_REF (dst);
3484
3485 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3486 if (old_args->polymorphic_call_contexts)
3487 new_args->polymorphic_call_contexts
3488 = vec_safe_copy (old_args->polymorphic_call_contexts);
3489
3490 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3491 {
3492 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3493 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3494
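/* The jump function structs were copied shallowly when NEW_ARGS was filled
   in, so the aggregate items vector is still shared with the source; give
   this jump function its own copy. */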
3495 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3496
3497 if (src_jf->type == IPA_JF_CONST)
3498 {
3499 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3500
3501 if (!src_rdesc)
3502 dst_jf->value.constant.rdesc = NULL;
3503 else if (src->caller == dst->caller)
3504 {
3505 struct ipa_ref *ref;
3506 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3507 gcc_checking_assert (n);
3508 ref = src->caller->find_reference (n, src->call_stmt,
3509 src->lto_stmt_uid);
3510 gcc_checking_assert (ref);
3511 dst->caller->clone_reference (ref, ref->stmt);
3512
3513 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3514 dst_rdesc->cs = dst;
3515 dst_rdesc->refcount = src_rdesc->refcount;
3516 dst_rdesc->next_duplicate = NULL;
3517 dst_jf->value.constant.rdesc = dst_rdesc;
3518 }
3519 else if (src_rdesc->cs == src)
3520 {
3521 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3522 dst_rdesc->cs = dst;
3523 dst_rdesc->refcount = src_rdesc->refcount;
3524 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3525 src_rdesc->next_duplicate = dst_rdesc;
3526 dst_jf->value.constant.rdesc = dst_rdesc;
3527 }
3528 else
3529 {
3530 struct ipa_cst_ref_desc *dst_rdesc;
3531 /* This can happen during inlining, when a JFUNC can refer to a
3532 reference taken in a function up in the tree of inline clones.
3533 We need to find the duplicate that refers to our tree of
3534 inline clones. */
3535
3536 gcc_assert (dst->caller->global.inlined_to);
3537 for (dst_rdesc = src_rdesc->next_duplicate;
3538 dst_rdesc;
3539 dst_rdesc = dst_rdesc->next_duplicate)
3540 {
3541 struct cgraph_node *top;
3542 top = dst_rdesc->cs->caller->global.inlined_to
3543 ? dst_rdesc->cs->caller->global.inlined_to
3544 : dst_rdesc->cs->caller;
3545 if (dst->caller->global.inlined_to == top)
3546 break;
3547 }
3548 gcc_assert (dst_rdesc);
3549 dst_jf->value.constant.rdesc = dst_rdesc;
3550 }
3551 }
3552 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3553 && src->caller == dst->caller)
3554 {
3555 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3556 ? dst->caller->global.inlined_to : dst->caller;
3557 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3558 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3559
3560 int c = ipa_get_controlled_uses (root_info, idx);
3561 if (c != IPA_UNDESCRIBED_USE)
3562 {
3563 c++;
3564 ipa_set_controlled_uses (root_info, idx, c);
3565 }
3566 }
3567 }
3568 }
3569
3570 /* Analyze a function newly added to the callgraph. */
3571
3572 static void
3573 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3574 {
3575 if (node->has_gimple_body_p ())
3576 ipa_analyze_node (node);
3577 }
3578
3579 /* Hook that is called by summary when a node is duplicated. */
3580
3581 void
3582 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3583 ipa_node_params *old_info,
3584 ipa_node_params *new_info)
3585 {
3586 ipa_agg_replacement_value *old_av, *new_av;
3587
3588 new_info->descriptors = old_info->descriptors.copy ();
3589 new_info->lattices = NULL;
3590 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3591
3592 new_info->analysis_done = old_info->analysis_done;
3593 new_info->node_enqueued = old_info->node_enqueued;
3594
3595 old_av = ipa_get_agg_replacements_for_node (src);
3596 if (old_av)
3597 {
3598 new_av = NULL;
3599 while (old_av)
3600 {
3601 struct ipa_agg_replacement_value *v;
3602
3603 v = ggc_alloc<ipa_agg_replacement_value> ();
3604 memcpy (v, old_av, sizeof (*v));
3605 v->next = new_av;
3606 new_av = v;
3607 old_av = old_av->next;
3608 }
3609 ipa_set_node_agg_value_chain (dst, new_av);
3610 }
3611
3612 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3613
3614 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3615 {
3616 ipcp_grow_transformations_if_necessary ();
3617 src_trans = ipcp_get_transformation_summary (src);
3618 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3619 vec<ipa_alignment, va_gc> *&dst_alignments
3620 = ipcp_get_transformation_summary (dst)->alignments;
3621 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3622 for (unsigned i = 0; i < src_alignments->length (); ++i)
3623 dst_alignments->quick_push ((*src_alignments)[i]);
3624 }
3625 }
3626
3627 /* Register our cgraph hooks if they are not already there. */
3628
3629 void
3630 ipa_register_cgraph_hooks (void)
3631 {
3632 ipa_check_create_node_params ();
3633
3634 if (!edge_removal_hook_holder)
3635 edge_removal_hook_holder =
3636 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3637 if (!edge_duplication_hook_holder)
3638 edge_duplication_hook_holder =
3639 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3640 function_insertion_hook_holder =
3641 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3642 }
3643
3644 /* Unregister our cgraph hooks. */
3645
3646 static void
3647 ipa_unregister_cgraph_hooks (void)
3648 {
3649 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3650 edge_removal_hook_holder = NULL;
3651 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3652 edge_duplication_hook_holder = NULL;
3653 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3654 function_insertion_hook_holder = NULL;
3655 }
3656
3657 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3658 longer needed after ipa-cp. */
3659
3660 void
3661 ipa_free_all_structures_after_ipa_cp (void)
3662 {
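  /* When optimizing or in LTO, this data is presumably still needed by the
     inliner; in that case everything is released later, in
     ipa_free_all_structures_after_iinln.  */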
3663 if (!optimize && !in_lto_p)
3664 {
3665 ipa_free_all_edge_args ();
3666 ipa_free_all_node_params ();
3667 ipcp_sources_pool.release ();
3668 ipcp_cst_values_pool.release ();
3669 ipcp_poly_ctx_values_pool.release ();
3670 ipcp_agg_lattice_pool.release ();
3671 ipa_unregister_cgraph_hooks ();
3672 ipa_refdesc_pool.release ();
3673 }
3674 }
3675
3676 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3677 longer needed after indirect inlining. */
3678
3679 void
3680 ipa_free_all_structures_after_iinln (void)
3681 {
3682 ipa_free_all_edge_args ();
3683 ipa_free_all_node_params ();
3684 ipa_unregister_cgraph_hooks ();
3685 ipcp_sources_pool.release ();
3686 ipcp_cst_values_pool.release ();
3687 ipcp_poly_ctx_values_pool.release ();
3688 ipcp_agg_lattice_pool.release ();
3689 ipa_refdesc_pool.release ();
3690 }
3691
3692 /* Print the parameter descriptors (ipa_tree_map data structures) of
3693    function NODE to F. */
3694
3695 void
3696 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3697 {
3698 int i, count;
3699 struct ipa_node_params *info;
3700
3701 if (!node->definition)
3702 return;
3703 info = IPA_NODE_REF (node);
3704 fprintf (f, " function %s/%i parameter descriptors:\n",
3705 node->name (), node->order);
3706 count = ipa_get_param_count (info);
3707 for (i = 0; i < count; i++)
3708 {
3709 int c;
3710
3711 fprintf (f, " ");
3712 ipa_dump_param (f, info, i);
3713 if (ipa_is_param_used (info, i))
3714 fprintf (f, " used");
3715 c = ipa_get_controlled_uses (info, i);
3716 if (c == IPA_UNDESCRIBED_USE)
3717 fprintf (f, " undescribed_use");
3718 else
3719 fprintf (f, " controlled_uses=%i", c);
3720 fprintf (f, "\n");
3721 }
3722 }
3723
3724 /* Print ipa_tree_map data structures of all functions in the
3725 callgraph to F. */
3726
3727 void
3728 ipa_print_all_params (FILE * f)
3729 {
3730 struct cgraph_node *node;
3731
3732 fprintf (f, "\nFunction parameters:\n");
3733 FOR_EACH_FUNCTION (node)
3734 ipa_print_node_params (f, node);
3735 }
3736
3737 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3738
3739 vec<tree>
3740 ipa_get_vector_of_formal_parms (tree fndecl)
3741 {
3742 vec<tree> args;
3743 int count;
3744 tree parm;
3745
3746 gcc_assert (!flag_wpa);
3747 count = count_formal_params (fndecl);
3748 args.create (count);
3749 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3750 args.quick_push (parm);
3751
3752 return args;
3753 }
3754
3755 /* Return a heap-allocated vector containing types of formal parameters of
3756 function type FNTYPE. */
3757
3758 vec<tree>
3759 ipa_get_vector_of_formal_parm_types (tree fntype)
3760 {
3761 vec<tree> types;
3762 int count = 0;
3763 tree t;
3764
3765 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3766 count++;
3767
3768 types.create (count);
3769 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3770 types.quick_push (TREE_VALUE (t));
3771
3772 return types;
3773 }
3774
3775 /* Modify the function declaration FNDECL and its type according to the plan in
3776 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3777 to reflect the actual parameters being modified which are determined by the
3778 base_index field. */
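/* As an illustration (made-up prototype): a single IPA_PARM_OP_COPY
   adjustment with base_index 1 rewrites "int foo (int a, int b)" into
   "int foo (int b)".  */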
3779
3780 void
3781 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3782 {
3783 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3784 tree orig_type = TREE_TYPE (fndecl);
3785 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3786
3787 /* The following test is an ugly hack; some functions simply don't have any
3788    arguments in their type. This is probably a bug but well... */
3789 bool care_for_types = (old_arg_types != NULL_TREE);
3790 bool last_parm_void;
3791 vec<tree> otypes;
3792 if (care_for_types)
3793 {
3794 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3795 == void_type_node);
3796 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3797 if (last_parm_void)
3798 gcc_assert (oparms.length () + 1 == otypes.length ());
3799 else
3800 gcc_assert (oparms.length () == otypes.length ());
3801 }
3802 else
3803 {
3804 last_parm_void = false;
3805 otypes.create (0);
3806 }
3807
3808 int len = adjustments.length ();
3809 tree *link = &DECL_ARGUMENTS (fndecl);
3810 tree new_arg_types = NULL;
3811 for (int i = 0; i < len; i++)
3812 {
3813 struct ipa_parm_adjustment *adj;
3814 gcc_assert (link);
3815
3816 adj = &adjustments[i];
3817 tree parm;
3818 if (adj->op == IPA_PARM_OP_NEW)
3819 parm = NULL;
3820 else
3821 parm = oparms[adj->base_index];
3822 adj->base = parm;
3823
3824 if (adj->op == IPA_PARM_OP_COPY)
3825 {
3826 if (care_for_types)
3827 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3828 new_arg_types);
3829 *link = parm;
3830 link = &DECL_CHAIN (parm);
3831 }
3832 else if (adj->op != IPA_PARM_OP_REMOVE)
3833 {
3834 tree new_parm;
3835 tree ptype;
3836
3837 if (adj->by_ref)
3838 ptype = build_pointer_type (adj->type);
3839 else
3840 {
3841 ptype = adj->type;
3842 if (is_gimple_reg_type (ptype))
3843 {
3844 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3845 if (TYPE_ALIGN (ptype) < malign)
3846 ptype = build_aligned_type (ptype, malign);
3847 }
3848 }
3849
3850 if (care_for_types)
3851 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3852
3853 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3854 ptype);
3855 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3856 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3857 DECL_ARTIFICIAL (new_parm) = 1;
3858 DECL_ARG_TYPE (new_parm) = ptype;
3859 DECL_CONTEXT (new_parm) = fndecl;
3860 TREE_USED (new_parm) = 1;
3861 DECL_IGNORED_P (new_parm) = 1;
3862 layout_decl (new_parm, 0);
3863
3864 if (adj->op == IPA_PARM_OP_NEW)
3865 adj->base = NULL;
3866 else
3867 adj->base = parm;
3868 adj->new_decl = new_parm;
3869
3870 *link = new_parm;
3871 link = &DECL_CHAIN (new_parm);
3872 }
3873 }
3874
3875 *link = NULL_TREE;
3876
3877 tree new_reversed = NULL;
3878 if (care_for_types)
3879 {
3880 new_reversed = nreverse (new_arg_types);
3881 if (last_parm_void)
3882 {
3883 if (new_reversed)
3884 TREE_CHAIN (new_arg_types) = void_list_node;
3885 else
3886 new_reversed = void_list_node;
3887 }
3888 }
3889
3890 /* Use copy_node to preserve as much as possible from the original type
3891    (debug info, attribute lists etc.).
3892    The exception is METHOD_TYPEs, which must have a THIS argument.
3893    When we are asked to remove it, we need to build a new FUNCTION_TYPE
3894    instead. */
3895 tree new_type = NULL;
3896 if (TREE_CODE (orig_type) != METHOD_TYPE
3897 || (adjustments[0].op == IPA_PARM_OP_COPY
3898 && adjustments[0].base_index == 0))
3899 {
3900 new_type = build_distinct_type_copy (orig_type);
3901 TYPE_ARG_TYPES (new_type) = new_reversed;
3902 }
3903 else
3904 {
3905 new_type
3906 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3907 new_reversed));
3908 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3909 DECL_VINDEX (fndecl) = NULL_TREE;
3910 }
3911
3912 /* When signature changes, we need to clear builtin info. */
3913 if (DECL_BUILT_IN (fndecl))
3914 {
3915 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3916 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3917 }
3918
3919 TREE_TYPE (fndecl) = new_type;
3920 DECL_VIRTUAL_P (fndecl) = 0;
3921 DECL_LANG_SPECIFIC (fndecl) = NULL;
3922 otypes.release ();
3923 oparms.release ();
3924 }
3925
3926 /* Modify actual arguments of the function call STMT as indicated in ADJUSTMENTS.
3927 If this is a directly recursive call, CS must be NULL. Otherwise it must
3928 contain the corresponding call graph edge. */
3929
3930 void
3931 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3932 ipa_parm_adjustment_vec adjustments)
3933 {
3934 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3935 vec<tree> vargs;
3936 vec<tree, va_gc> **debug_args = NULL;
3937 gcall *new_stmt;
3938 gimple_stmt_iterator gsi, prev_gsi;
3939 tree callee_decl;
3940 int i, len;
3941
3942 len = adjustments.length ();
3943 vargs.create (len);
3944 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3945 current_node->remove_stmt_references (stmt);
3946
3947 gsi = gsi_for_stmt (stmt);
3948 prev_gsi = gsi;
3949 gsi_prev (&prev_gsi);
3950 for (i = 0; i < len; i++)
3951 {
3952 struct ipa_parm_adjustment *adj;
3953
3954 adj = &adjustments[i];
3955
3956 if (adj->op == IPA_PARM_OP_COPY)
3957 {
3958 tree arg = gimple_call_arg (stmt, adj->base_index);
3959
3960 vargs.quick_push (arg);
3961 }
3962 else if (adj->op != IPA_PARM_OP_REMOVE)
3963 {
3964 tree expr, base, off;
3965 location_t loc;
3966 unsigned int deref_align = 0;
3967 bool deref_base = false;
3968
3969 	   /* Since we create a new parameter out of the value of the old one, we
3970 	      can do the following kinds of transformations:
3971
3972 - A scalar passed by reference is converted to a scalar passed by
3973 value. (adj->by_ref is false and the type of the original
3974 actual argument is a pointer to a scalar).
3975
3976 - A part of an aggregate is passed instead of the whole aggregate.
3977 The part can be passed either by value or by reference, this is
3978 determined by value of adj->by_ref. Moreover, the code below
3979 handles both situations when the original aggregate is passed by
3980 value (its type is not a pointer) and when it is passed by
3981 reference (it is a pointer to an aggregate).
3982
3983 When the new argument is passed by reference (adj->by_ref is true)
3984 it must be a part of an aggregate and therefore we form it by
3985 simply taking the address of a reference inside the original
3986 aggregate. */
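	  /* A made-up example: a call foo (&s) can thus become foo (s.f)
	     when only field F is used and passed by value, or foo (&s.f)
	     when that piece is passed by reference.  */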
3987
3988 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3989 base = gimple_call_arg (stmt, adj->base_index);
3990 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3991 : EXPR_LOCATION (base);
3992
3993 if (TREE_CODE (base) != ADDR_EXPR
3994 && POINTER_TYPE_P (TREE_TYPE (base)))
3995 off = build_int_cst (adj->alias_ptr_type,
3996 adj->offset / BITS_PER_UNIT);
3997 else
3998 {
3999 HOST_WIDE_INT base_offset;
4000 tree prev_base;
4001 bool addrof;
4002
4003 if (TREE_CODE (base) == ADDR_EXPR)
4004 {
4005 base = TREE_OPERAND (base, 0);
4006 addrof = true;
4007 }
4008 else
4009 addrof = false;
4010 prev_base = base;
4011 base = get_addr_base_and_unit_offset (base, &base_offset);
4012 /* Aggregate arguments can have non-invariant addresses. */
4013 if (!base)
4014 {
4015 base = build_fold_addr_expr (prev_base);
4016 off = build_int_cst (adj->alias_ptr_type,
4017 adj->offset / BITS_PER_UNIT);
4018 }
4019 else if (TREE_CODE (base) == MEM_REF)
4020 {
4021 if (!addrof)
4022 {
4023 deref_base = true;
4024 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4025 }
4026 off = build_int_cst (adj->alias_ptr_type,
4027 base_offset
4028 + adj->offset / BITS_PER_UNIT);
4029 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4030 off);
4031 base = TREE_OPERAND (base, 0);
4032 }
4033 else
4034 {
4035 off = build_int_cst (adj->alias_ptr_type,
4036 base_offset
4037 + adj->offset / BITS_PER_UNIT);
4038 base = build_fold_addr_expr (base);
4039 }
4040 }
4041
4042 if (!adj->by_ref)
4043 {
4044 tree type = adj->type;
4045 unsigned int align;
4046 unsigned HOST_WIDE_INT misalign;
4047
4048 if (deref_base)
4049 {
4050 align = deref_align;
4051 misalign = 0;
4052 }
4053 else
4054 {
4055 get_pointer_alignment_1 (base, &align, &misalign);
4056 if (TYPE_ALIGN (type) > align)
4057 align = TYPE_ALIGN (type);
4058 }
4059 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4060 * BITS_PER_UNIT);
4061 misalign = misalign & (align - 1);
4062 if (misalign != 0)
4063 align = (misalign & -misalign);
4064 if (align < TYPE_ALIGN (type))
4065 type = build_aligned_type (type, align);
4066 base = force_gimple_operand_gsi (&gsi, base,
4067 true, NULL, true, GSI_SAME_STMT);
4068 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4069 	   /* If expr is not a valid gimple call argument, emit
4070 	      a load into a temporary. */
4071 if (is_gimple_reg_type (TREE_TYPE (expr)))
4072 {
4073 gimple tem = gimple_build_assign (NULL_TREE, expr);
4074 if (gimple_in_ssa_p (cfun))
4075 {
4076 gimple_set_vuse (tem, gimple_vuse (stmt));
4077 expr = make_ssa_name (TREE_TYPE (expr), tem);
4078 }
4079 else
4080 expr = create_tmp_reg (TREE_TYPE (expr));
4081 gimple_assign_set_lhs (tem, expr);
4082 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4083 }
4084 }
4085 else
4086 {
4087 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4088 expr = build_fold_addr_expr (expr);
4089 expr = force_gimple_operand_gsi (&gsi, expr,
4090 true, NULL, true, GSI_SAME_STMT);
4091 }
4092 vargs.quick_push (expr);
4093 }
4094 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4095 {
4096 unsigned int ix;
4097 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4098 gimple def_temp;
4099
4100 arg = gimple_call_arg (stmt, adj->base_index);
4101 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4102 {
4103 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4104 continue;
4105 arg = fold_convert_loc (gimple_location (stmt),
4106 TREE_TYPE (origin), arg);
4107 }
4108 if (debug_args == NULL)
4109 debug_args = decl_debug_args_insert (callee_decl);
4110 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4111 if (ddecl == origin)
4112 {
4113 ddecl = (**debug_args)[ix + 1];
4114 break;
4115 }
4116 if (ddecl == NULL)
4117 {
4118 ddecl = make_node (DEBUG_EXPR_DECL);
4119 DECL_ARTIFICIAL (ddecl) = 1;
4120 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4121 DECL_MODE (ddecl) = DECL_MODE (origin);
4122
4123 vec_safe_push (*debug_args, origin);
4124 vec_safe_push (*debug_args, ddecl);
4125 }
4126 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4127 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4128 }
4129 }
4130
4131 if (dump_file && (dump_flags & TDF_DETAILS))
4132 {
4133 fprintf (dump_file, "replacing stmt:");
4134 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4135 }
4136
4137 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4138 vargs.release ();
4139 if (gimple_call_lhs (stmt))
4140 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4141
4142 gimple_set_block (new_stmt, gimple_block (stmt));
4143 if (gimple_has_location (stmt))
4144 gimple_set_location (new_stmt, gimple_location (stmt));
4145 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4146 gimple_call_copy_flags (new_stmt, stmt);
4147 if (gimple_in_ssa_p (cfun))
4148 {
4149 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4150 if (gimple_vdef (stmt))
4151 {
4152 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4153 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4154 }
4155 }
4156
4157 if (dump_file && (dump_flags & TDF_DETAILS))
4158 {
4159 fprintf (dump_file, "with stmt:");
4160 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4161 fprintf (dump_file, "\n");
4162 }
4163 gsi_replace (&gsi, new_stmt, true);
4164 if (cs)
4165 cs->set_call_stmt (new_stmt);
4166 do
4167 {
4168 current_node->record_stmt_references (gsi_stmt (gsi));
4169 gsi_prev (&gsi);
4170 }
4171 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4172 }
4173
4174 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4175 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4176    specifies whether the function should care about type incompatibility
4177    between the current and new expressions. If it is false, the function
4178    will leave incompatibility issues to the caller. Return true iff the expression
4179 was modified. */
4180
4181 bool
4182 ipa_modify_expr (tree *expr, bool convert,
4183 ipa_parm_adjustment_vec adjustments)
4184 {
4185 struct ipa_parm_adjustment *cand
4186 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4187 if (!cand)
4188 return false;
4189
4190 tree src;
4191 if (cand->by_ref)
4192 src = build_simple_mem_ref (cand->new_decl);
4193 else
4194 src = cand->new_decl;
4195
4196 if (dump_file && (dump_flags & TDF_DETAILS))
4197 {
4198 fprintf (dump_file, "About to replace expr ");
4199 print_generic_expr (dump_file, *expr, 0);
4200 fprintf (dump_file, " with ");
4201 print_generic_expr (dump_file, src, 0);
4202 fprintf (dump_file, "\n");
4203 }
4204
4205 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4206 {
4207 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4208 *expr = vce;
4209 }
4210 else
4211 *expr = src;
4212 return true;
4213 }
4214
4215 /* If T is an SSA_NAME, return NULL if it is not a default def or
4216 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4217 the base variable is always returned, regardless if it is a default
4218 def. Return T if it is not an SSA_NAME. */
4219
4220 static tree
4221 get_ssa_base_param (tree t, bool ignore_default_def)
4222 {
4223 if (TREE_CODE (t) == SSA_NAME)
4224 {
4225 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4226 return SSA_NAME_VAR (t);
4227 else
4228 return NULL_TREE;
4229 }
4230 return t;
4231 }
4232
4233 /* Given an expression, return an adjustment entry specifying the
4234 transformation to be done on EXPR. If no suitable adjustment entry
4235    is found, return NULL.
4236
4237 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4238 default def, otherwise bail on them.
4239
4240 If CONVERT is non-NULL, this function will set *CONVERT if the
4241 expression provided is a component reference. ADJUSTMENTS is the
4242 adjustments vector. */
4243
4244 ipa_parm_adjustment *
4245 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4246 ipa_parm_adjustment_vec adjustments,
4247 bool ignore_default_def)
4248 {
4249 if (TREE_CODE (**expr) == BIT_FIELD_REF
4250 || TREE_CODE (**expr) == IMAGPART_EXPR
4251 || TREE_CODE (**expr) == REALPART_EXPR)
4252 {
4253 *expr = &TREE_OPERAND (**expr, 0);
4254 if (convert)
4255 *convert = true;
4256 }
4257
4258 HOST_WIDE_INT offset, size, max_size;
4259 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4260 if (!base || size == -1 || max_size == -1)
4261 return NULL;
4262
4263 if (TREE_CODE (base) == MEM_REF)
4264 {
4265 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4266 base = TREE_OPERAND (base, 0);
4267 }
4268
4269 base = get_ssa_base_param (base, ignore_default_def);
4270 if (!base || TREE_CODE (base) != PARM_DECL)
4271 return NULL;
4272
4273 struct ipa_parm_adjustment *cand = NULL;
4274 unsigned int len = adjustments.length ();
4275 for (unsigned i = 0; i < len; i++)
4276 {
4277 struct ipa_parm_adjustment *adj = &adjustments[i];
4278
4279 if (adj->base == base
4280 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4281 {
4282 cand = adj;
4283 break;
4284 }
4285 }
4286
4287 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4288 return NULL;
4289 return cand;
4290 }
4291
4292 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4293
4294 static bool
4295 index_in_adjustments_multiple_times_p (int base_index,
4296 ipa_parm_adjustment_vec adjustments)
4297 {
4298 int i, len = adjustments.length ();
4299 bool one = false;
4300
4301 for (i = 0; i < len; i++)
4302 {
4303 struct ipa_parm_adjustment *adj;
4304 adj = &adjustments[i];
4305
4306 if (adj->base_index == base_index)
4307 {
4308 if (one)
4309 return true;
4310 else
4311 one = true;
4312 }
4313 }
4314 return false;
4315 }
4316
4317
4318 /* Return adjustments that should have the same effect on function parameters
4319 and call arguments as if they were first changed according to adjustments in
4320 INNER and then by adjustments in OUTER. */
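/* A hypothetical example: if INNER is {copy of original parameter 0,
   removal of original parameter 1} and OUTER is {copy of its parameter 0},
   the result is {copy of original parameter 0} followed by the re-appended
   removal of original parameter 1.  */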
4321
4322 ipa_parm_adjustment_vec
4323 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4324 ipa_parm_adjustment_vec outer)
4325 {
4326 int i, outlen = outer.length ();
4327 int inlen = inner.length ();
4328 int removals = 0;
4329 ipa_parm_adjustment_vec adjustments, tmp;
4330
4331 tmp.create (inlen);
4332 for (i = 0; i < inlen; i++)
4333 {
4334 struct ipa_parm_adjustment *n;
4335 n = &inner[i];
4336
4337 if (n->op == IPA_PARM_OP_REMOVE)
4338 removals++;
4339 else
4340 {
4341 	  /* FIXME: Handling of new arguments is not implemented yet. */
4342 gcc_assert (n->op != IPA_PARM_OP_NEW);
4343 tmp.quick_push (*n);
4344 }
4345 }
4346
4347 adjustments.create (outlen + removals);
4348 for (i = 0; i < outlen; i++)
4349 {
4350 struct ipa_parm_adjustment r;
4351 struct ipa_parm_adjustment *out = &outer[i];
4352 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4353
4354 memset (&r, 0, sizeof (r));
4355 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4356 if (out->op == IPA_PARM_OP_REMOVE)
4357 {
4358 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4359 {
4360 r.op = IPA_PARM_OP_REMOVE;
4361 adjustments.quick_push (r);
4362 }
4363 continue;
4364 }
4365 else
4366 {
4367 	  /* FIXME: Handling of new arguments is not implemented yet. */
4368 gcc_assert (out->op != IPA_PARM_OP_NEW);
4369 }
4370
4371 r.base_index = in->base_index;
4372 r.type = out->type;
4373
4374 /* FIXME: Create nonlocal value too. */
4375
4376 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4377 r.op = IPA_PARM_OP_COPY;
4378 else if (in->op == IPA_PARM_OP_COPY)
4379 r.offset = out->offset;
4380 else if (out->op == IPA_PARM_OP_COPY)
4381 r.offset = in->offset;
4382 else
4383 r.offset = in->offset + out->offset;
4384 adjustments.quick_push (r);
4385 }
4386
4387 for (i = 0; i < inlen; i++)
4388 {
4389 struct ipa_parm_adjustment *n = &inner[i];
4390
4391 if (n->op == IPA_PARM_OP_REMOVE)
4392 adjustments.quick_push (*n);
4393 }
4394
4395 tmp.release ();
4396 return adjustments;
4397 }
4398
4399 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4400    human-friendly way, assuming they are meant to be applied to FNDECL. */
4401
4402 void
4403 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4404 tree fndecl)
4405 {
4406 int i, len = adjustments.length ();
4407 bool first = true;
4408 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4409
4410 fprintf (file, "IPA param adjustments: ");
4411 for (i = 0; i < len; i++)
4412 {
4413 struct ipa_parm_adjustment *adj;
4414 adj = &adjustments[i];
4415
4416 if (!first)
4417 fprintf (file, " ");
4418 else
4419 first = false;
4420
4421 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4422 print_generic_expr (file, parms[adj->base_index], 0);
4423 if (adj->base)
4424 {
4425 fprintf (file, ", base: ");
4426 print_generic_expr (file, adj->base, 0);
4427 }
4428 if (adj->new_decl)
4429 {
4430 fprintf (file, ", new_decl: ");
4431 print_generic_expr (file, adj->new_decl, 0);
4432 }
4433 if (adj->new_ssa_base)
4434 {
4435 fprintf (file, ", new_ssa_base: ");
4436 print_generic_expr (file, adj->new_ssa_base, 0);
4437 }
4438
4439 if (adj->op == IPA_PARM_OP_COPY)
4440 fprintf (file, ", copy_param");
4441 else if (adj->op == IPA_PARM_OP_REMOVE)
4442 fprintf (file, ", remove_param");
4443 else
4444 fprintf (file, ", offset %li", (long) adj->offset);
4445 if (adj->by_ref)
4446 fprintf (file, ", by_ref");
4447 print_node_brief (file, ", type: ", adj->type, 0);
4448 fprintf (file, "\n");
4449 }
4450 parms.release ();
4451 }
4452
4453 /* Dump the linked list of aggregate replacement values AV to F. */
4454
4455 void
4456 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4457 {
4458 bool comma = false;
4459 fprintf (f, " Aggregate replacements:");
4460 for (; av; av = av->next)
4461 {
4462 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4463 av->index, av->offset);
4464 print_generic_expr (f, av->value, 0);
4465 comma = true;
4466 }
4467 fprintf (f, "\n");
4468 }
4469
4470 /* Stream out jump function JUMP_FUNC to OB. */
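/* Informally, the layout produced here (and consumed by
   ipa_read_jump_function) is: a uhwi with the jump function type; the
   type-specific payload (a constant tree, or a pass-through operation with
   formal id and either an agg_preserved bitpack or an operand tree, or an
   ancestor offset, formal id and agg_preserved bitpack); a uhwi count of
   aggregate items with a by_ref bitpack when nonzero, then each item as a
   uhwi offset plus a tree; and finally the bitpacked alignment.known flag
   with align/misalign uhwis when known.  */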
4471
4472 static void
4473 ipa_write_jump_function (struct output_block *ob,
4474 struct ipa_jump_func *jump_func)
4475 {
4476 struct ipa_agg_jf_item *item;
4477 struct bitpack_d bp;
4478 int i, count;
4479
4480 streamer_write_uhwi (ob, jump_func->type);
4481 switch (jump_func->type)
4482 {
4483 case IPA_JF_UNKNOWN:
4484 break;
4485 case IPA_JF_CONST:
4486 gcc_assert (
4487 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4488 stream_write_tree (ob, jump_func->value.constant.value, true);
4489 break;
4490 case IPA_JF_PASS_THROUGH:
4491 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4492 if (jump_func->value.pass_through.operation == NOP_EXPR)
4493 {
4494 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4495 bp = bitpack_create (ob->main_stream);
4496 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4497 streamer_write_bitpack (&bp);
4498 }
4499 else
4500 {
4501 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4502 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4503 }
4504 break;
4505 case IPA_JF_ANCESTOR:
4506 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4507 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4508 bp = bitpack_create (ob->main_stream);
4509 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4510 streamer_write_bitpack (&bp);
4511 break;
4512 }
4513
4514 count = vec_safe_length (jump_func->agg.items);
4515 streamer_write_uhwi (ob, count);
4516 if (count)
4517 {
4518 bp = bitpack_create (ob->main_stream);
4519 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4520 streamer_write_bitpack (&bp);
4521 }
4522
4523 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4524 {
4525 streamer_write_uhwi (ob, item->offset);
4526 stream_write_tree (ob, item->value, true);
4527 }
4528
4529 bp = bitpack_create (ob->main_stream);
4530 bp_pack_value (&bp, jump_func->alignment.known, 1);
4531 streamer_write_bitpack (&bp);
4532 if (jump_func->alignment.known)
4533 {
4534 streamer_write_uhwi (ob, jump_func->alignment.align);
4535 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4536 }
4537 }
4538
4539 /* Read in jump function JUMP_FUNC from IB. */
4540
4541 static void
4542 ipa_read_jump_function (struct lto_input_block *ib,
4543 struct ipa_jump_func *jump_func,
4544 struct cgraph_edge *cs,
4545 struct data_in *data_in)
4546 {
4547 enum jump_func_type jftype;
4548 enum tree_code operation;
4549 int i, count;
4550
4551 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4552 switch (jftype)
4553 {
4554 case IPA_JF_UNKNOWN:
4555 ipa_set_jf_unknown (jump_func);
4556 break;
4557 case IPA_JF_CONST:
4558 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4559 break;
4560 case IPA_JF_PASS_THROUGH:
4561 operation = (enum tree_code) streamer_read_uhwi (ib);
4562 if (operation == NOP_EXPR)
4563 {
4564 int formal_id = streamer_read_uhwi (ib);
4565 struct bitpack_d bp = streamer_read_bitpack (ib);
4566 bool agg_preserved = bp_unpack_value (&bp, 1);
4567 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4568 }
4569 else
4570 {
4571 tree operand = stream_read_tree (ib, data_in);
4572 int formal_id = streamer_read_uhwi (ib);
4573 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4574 operation);
4575 }
4576 break;
4577 case IPA_JF_ANCESTOR:
4578 {
4579 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4580 int formal_id = streamer_read_uhwi (ib);
4581 struct bitpack_d bp = streamer_read_bitpack (ib);
4582 bool agg_preserved = bp_unpack_value (&bp, 1);
4583 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4584 break;
4585 }
4586 }
4587
4588 count = streamer_read_uhwi (ib);
4589 vec_alloc (jump_func->agg.items, count);
4590 if (count)
4591 {
4592 struct bitpack_d bp = streamer_read_bitpack (ib);
4593 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4594 }
4595 for (i = 0; i < count; i++)
4596 {
4597 struct ipa_agg_jf_item item;
4598 item.offset = streamer_read_uhwi (ib);
4599 item.value = stream_read_tree (ib, data_in);
4600 jump_func->agg.items->quick_push (item);
4601 }
4602
4603 struct bitpack_d bp = streamer_read_bitpack (ib);
4604 bool alignment_known = bp_unpack_value (&bp, 1);
4605 if (alignment_known)
4606 {
4607 jump_func->alignment.known = true;
4608 jump_func->alignment.align = streamer_read_uhwi (ib);
4609 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4610 }
4611 else
4612 jump_func->alignment.known = false;
4613 }
4614
4615 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4616 relevant to indirect inlining to OB. */
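/* The format is: a hwi param_index; a bitpack of the polymorphic,
   agg_contents, member_ptr, by_ref and vptr_changed flags; the offset when
   agg_contents or polymorphic is set; and, for polymorphic calls, the
   otr_token, otr_type and the streamed context.  */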
4617
4618 static void
4619 ipa_write_indirect_edge_info (struct output_block *ob,
4620 struct cgraph_edge *cs)
4621 {
4622 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4623 struct bitpack_d bp;
4624
4625 streamer_write_hwi (ob, ii->param_index);
4626 bp = bitpack_create (ob->main_stream);
4627 bp_pack_value (&bp, ii->polymorphic, 1);
4628 bp_pack_value (&bp, ii->agg_contents, 1);
4629 bp_pack_value (&bp, ii->member_ptr, 1);
4630 bp_pack_value (&bp, ii->by_ref, 1);
4631 bp_pack_value (&bp, ii->vptr_changed, 1);
4632 streamer_write_bitpack (&bp);
4633 if (ii->agg_contents || ii->polymorphic)
4634 streamer_write_hwi (ob, ii->offset);
4635 else
4636 gcc_assert (ii->offset == 0);
4637
4638 if (ii->polymorphic)
4639 {
4640 streamer_write_hwi (ob, ii->otr_token);
4641 stream_write_tree (ob, ii->otr_type, true);
4642 ii->context.stream_out (ob);
4643 }
4644 }
4645
4646 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4647 relevant to indirect inlining from IB. */
4648
4649 static void
4650 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4651 struct data_in *data_in,
4652 struct cgraph_edge *cs)
4653 {
4654 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4655 struct bitpack_d bp;
4656
4657 ii->param_index = (int) streamer_read_hwi (ib);
4658 bp = streamer_read_bitpack (ib);
4659 ii->polymorphic = bp_unpack_value (&bp, 1);
4660 ii->agg_contents = bp_unpack_value (&bp, 1);
4661 ii->member_ptr = bp_unpack_value (&bp, 1);
4662 ii->by_ref = bp_unpack_value (&bp, 1);
4663 ii->vptr_changed = bp_unpack_value (&bp, 1);
4664 if (ii->agg_contents || ii->polymorphic)
4665 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4666 else
4667 ii->offset = 0;
4668 if (ii->polymorphic)
4669 {
4670 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4671 ii->otr_type = stream_read_tree (ib, data_in);
4672 ii->context.stream_in (ib, data_in);
4673 }
4674 }
4675
4676 /* Stream out NODE info to OB. */
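/* Note that for each call edge the argument count is streamed multiplied
   by two, with the low bit recording whether polymorphic call contexts
   follow; ipa_read_node_info recovers the two with "count & 1" and
   "count / 2".  */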
4677
4678 static void
4679 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4680 {
4681 int node_ref;
4682 lto_symtab_encoder_t encoder;
4683 struct ipa_node_params *info = IPA_NODE_REF (node);
4684 int j;
4685 struct cgraph_edge *e;
4686 struct bitpack_d bp;
4687
4688 encoder = ob->decl_state->symtab_node_encoder;
4689 node_ref = lto_symtab_encoder_encode (encoder, node);
4690 streamer_write_uhwi (ob, node_ref);
4691
4692 streamer_write_uhwi (ob, ipa_get_param_count (info));
4693 for (j = 0; j < ipa_get_param_count (info); j++)
4694 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4695 bp = bitpack_create (ob->main_stream);
4696 gcc_assert (info->analysis_done
4697 || ipa_get_param_count (info) == 0);
4698 gcc_assert (!info->node_enqueued);
4699 gcc_assert (!info->ipcp_orig_node);
4700 for (j = 0; j < ipa_get_param_count (info); j++)
4701 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4702 streamer_write_bitpack (&bp);
4703 for (j = 0; j < ipa_get_param_count (info); j++)
4704 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4705 for (e = node->callees; e; e = e->next_callee)
4706 {
4707 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4708
4709 streamer_write_uhwi (ob,
4710 ipa_get_cs_argument_count (args) * 2
4711 + (args->polymorphic_call_contexts != NULL));
4712 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4713 {
4714 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4715 if (args->polymorphic_call_contexts != NULL)
4716 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4717 }
4718 }
4719 for (e = node->indirect_calls; e; e = e->next_callee)
4720 {
4721 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4722
4723 streamer_write_uhwi (ob,
4724 ipa_get_cs_argument_count (args) * 2
4725 + (args->polymorphic_call_contexts != NULL));
4726 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4727 {
4728 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4729 if (args->polymorphic_call_contexts != NULL)
4730 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4731 }
4732 ipa_write_indirect_edge_info (ob, e);
4733 }
4734 }
4735
4736 /* Stream in NODE info from IB. */
4737
4738 static void
4739 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4740 struct data_in *data_in)
4741 {
4742 struct ipa_node_params *info = IPA_NODE_REF (node);
4743 int k;
4744 struct cgraph_edge *e;
4745 struct bitpack_d bp;
4746
4747 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4748
4749 for (k = 0; k < ipa_get_param_count (info); k++)
4750 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4751
4752 bp = streamer_read_bitpack (ib);
4753 if (ipa_get_param_count (info) != 0)
4754 info->analysis_done = true;
4755 info->node_enqueued = false;
4756 for (k = 0; k < ipa_get_param_count (info); k++)
4757 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4758 for (k = 0; k < ipa_get_param_count (info); k++)
4759 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4760 for (e = node->callees; e; e = e->next_callee)
4761 {
4762 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4763 int count = streamer_read_uhwi (ib);
4764 bool contexts_computed = count & 1;
4765 count /= 2;
4766
4767 if (!count)
4768 continue;
4769 vec_safe_grow_cleared (args->jump_functions, count);
4770 if (contexts_computed)
4771 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4772
4773 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4774 {
4775 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4776 data_in);
4777 if (contexts_computed)
4778 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4779 }
4780 }
4781 for (e = node->indirect_calls; e; e = e->next_callee)
4782 {
4783 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4784 int count = streamer_read_uhwi (ib);
4785 bool contexts_computed = count & 1;
4786 count /= 2;
4787
4788 if (count)
4789 {
4790 vec_safe_grow_cleared (args->jump_functions, count);
4791 if (contexts_computed)
4792 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4793 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4794 {
4795 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4796 data_in);
4797 if (contexts_computed)
4798 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4799 }
4800 }
4801 ipa_read_indirect_edge_info (ib, data_in, e);
4802 }
4803 }
4804
4805 /* Write jump functions for all analyzed nodes in the current partition. */
4806
4807 void
4808 ipa_prop_write_jump_functions (void)
4809 {
4810 struct cgraph_node *node;
4811 struct output_block *ob;
4812 unsigned int count = 0;
4813 lto_symtab_encoder_iterator lsei;
4814 lto_symtab_encoder_t encoder;
4815
4816 if (!ipa_node_params_sum)
4817 return;
4818
4819 ob = create_output_block (LTO_section_jump_functions);
4820 encoder = ob->decl_state->symtab_node_encoder;
4821 ob->symbol = NULL;
4822 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4823 lsei_next_function_in_partition (&lsei))
4824 {
4825 node = lsei_cgraph_node (lsei);
4826 if (node->has_gimple_body_p ()
4827 && IPA_NODE_REF (node) != NULL)
4828 count++;
4829 }
4830
4831 streamer_write_uhwi (ob, count);
4832
4833 /* Process all of the functions. */
4834 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4835 lsei_next_function_in_partition (&lsei))
4836 {
4837 node = lsei_cgraph_node (lsei);
4838 if (node->has_gimple_body_p ()
4839 && IPA_NODE_REF (node) != NULL)
4840 ipa_write_node_info (ob, node);
4841 }
4842 streamer_write_char_stream (ob->main_stream, 0);
4843 produce_asm (ob, NULL);
4844 destroy_output_block (ob);
4845 }
4846
4847 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
4848
4849 static void
4850 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4851 size_t len)
4852 {
4853 const struct lto_function_header *header =
4854 (const struct lto_function_header *) data;
4855 const int cfg_offset = sizeof (struct lto_function_header);
4856 const int main_offset = cfg_offset + header->cfg_size;
4857 const int string_offset = main_offset + header->main_size;
4858 struct data_in *data_in;
4859 unsigned int i;
4860 unsigned int count;
4861
4862 lto_input_block ib_main ((const char *) data + main_offset,
4863 header->main_size, file_data->mode_table);
4864
4865 data_in =
4866 lto_data_in_create (file_data, (const char *) data + string_offset,
4867 header->string_size, vNULL);
4868 count = streamer_read_uhwi (&ib_main);
4869
4870 for (i = 0; i < count; i++)
4871 {
4872 unsigned int index;
4873 struct cgraph_node *node;
4874 lto_symtab_encoder_t encoder;
4875
4876 index = streamer_read_uhwi (&ib_main);
4877 encoder = file_data->symtab_node_encoder;
4878 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4879 index));
4880 gcc_assert (node->definition);
4881 ipa_read_node_info (&ib_main, node, data_in);
4882 }
4883 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4884 len);
4885 lto_data_in_delete (data_in);
4886 }
4887
4888 /* Read ipcp jump functions. */
4889
4890 void
4891 ipa_prop_read_jump_functions (void)
4892 {
4893 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4894 struct lto_file_decl_data *file_data;
4895 unsigned int j = 0;
4896
4897 ipa_check_create_node_params ();
4898 ipa_check_create_edge_args ();
4899 ipa_register_cgraph_hooks ();
4900
4901 while ((file_data = file_data_vec[j++]))
4902 {
4903 size_t len;
4904 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4905
4906 if (data)
4907 ipa_prop_read_section (file_data, data, len);
4908 }
4909 }
4910
4911 /* After merging units, we can get a mismatch in argument counts.
4912    Decl merging might also have rendered parameter lists obsolete.
4913    Make sure the param and edge summaries exist. */
4914
4915 void
4916 ipa_update_after_lto_read (void)
4917 {
4918 ipa_check_create_node_params ();
4919 ipa_check_create_edge_args ();
4920 }
4921
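/* Stream out the aggregate value replacement chain and the parameter
   alignment information for NODE to OB.  */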
4922 void
4923 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4924 {
4925 int node_ref;
4926 unsigned int count = 0;
4927 lto_symtab_encoder_t encoder;
4928 struct ipa_agg_replacement_value *aggvals, *av;
4929
4930 aggvals = ipa_get_agg_replacements_for_node (node);
4931 encoder = ob->decl_state->symtab_node_encoder;
4932 node_ref = lto_symtab_encoder_encode (encoder, node);
4933 streamer_write_uhwi (ob, node_ref);
4934
4935 for (av = aggvals; av; av = av->next)
4936 count++;
4937 streamer_write_uhwi (ob, count);
4938
4939 for (av = aggvals; av; av = av->next)
4940 {
4941 struct bitpack_d bp;
4942
4943 streamer_write_uhwi (ob, av->offset);
4944 streamer_write_uhwi (ob, av->index);
4945 stream_write_tree (ob, av->value, true);
4946
4947 bp = bitpack_create (ob->main_stream);
4948 bp_pack_value (&bp, av->by_ref, 1);
4949 streamer_write_bitpack (&bp);
4950 }
4951
4952 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4953 if (ts && vec_safe_length (ts->alignments) > 0)
4954 {
4955 count = ts->alignments->length ();
4956
4957 streamer_write_uhwi (ob, count);
4958 for (unsigned i = 0; i < count; ++i)
4959 {
4960 ipa_alignment *parm_al = &(*ts->alignments)[i];
4961
4962 struct bitpack_d bp;
4963 bp = bitpack_create (ob->main_stream);
4964 bp_pack_value (&bp, parm_al->known, 1);
4965 streamer_write_bitpack (&bp);
4966 if (parm_al->known)
4967 {
4968 streamer_write_uhwi (ob, parm_al->align);
4969 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4970 parm_al->misalign);
4971 }
4972 }
4973 }
4974 else
4975 streamer_write_uhwi (ob, 0);
4976 }
4977
4978 /* Stream in the aggregate value replacement chain for NODE from IB. */
4979
4980 static void
4981 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4982 data_in *data_in)
4983 {
4984 struct ipa_agg_replacement_value *aggvals = NULL;
4985 unsigned int count, i;
4986
4987 count = streamer_read_uhwi (ib);
4988   for (i = 0; i < count; i++)
4989 {
4990 struct ipa_agg_replacement_value *av;
4991 struct bitpack_d bp;
4992
4993 av = ggc_alloc<ipa_agg_replacement_value> ();
4994 av->offset = streamer_read_uhwi (ib);
4995 av->index = streamer_read_uhwi (ib);
4996 av->value = stream_read_tree (ib, data_in);
4997 bp = streamer_read_bitpack (ib);
4998 av->by_ref = bp_unpack_value (&bp, 1);
4999 av->next = aggvals;
5000 aggvals = av;
5001 }
5002 ipa_set_node_agg_value_chain (node, aggvals);
5003
5004 count = streamer_read_uhwi (ib);
5005 if (count > 0)
5006 {
5007 ipcp_grow_transformations_if_necessary ();
5008
5009 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5010 vec_safe_grow_cleared (ts->alignments, count);
5011
5012 for (i = 0; i < count; i++)
5013 {
5014 ipa_alignment *parm_al;
5015 parm_al = &(*ts->alignments)[i];
5016 struct bitpack_d bp;
5017 bp = streamer_read_bitpack (ib);
5018 parm_al->known = bp_unpack_value (&bp, 1);
5019 if (parm_al->known)
5020 {
5021 parm_al->align = streamer_read_uhwi (ib);
5022 parm_al->misalign
5023 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5024 0, parm_al->align);
5025 }
5026 }
5027 }
5028 }
5029
5030 /* Write all aggregate replacements for nodes in the current partition. */
5031
5032 void
5033 ipcp_write_transformation_summaries (void)
5034 {
5035 struct cgraph_node *node;
5036 struct output_block *ob;
5037 unsigned int count = 0;
5038 lto_symtab_encoder_iterator lsei;
5039 lto_symtab_encoder_t encoder;
5040
5041 ob = create_output_block (LTO_section_ipcp_transform);
5042 encoder = ob->decl_state->symtab_node_encoder;
5043 ob->symbol = NULL;
5044 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5045 lsei_next_function_in_partition (&lsei))
5046 {
5047 node = lsei_cgraph_node (lsei);
5048 if (node->has_gimple_body_p ())
5049 count++;
5050 }
5051
5052 streamer_write_uhwi (ob, count);
5053
5054 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5055 lsei_next_function_in_partition (&lsei))
5056 {
5057 node = lsei_cgraph_node (lsei);
5058 if (node->has_gimple_body_p ())
5059 write_ipcp_transformation_info (ob, node);
5060 }
5061 streamer_write_char_stream (ob->main_stream, 0);
5062 produce_asm (ob, NULL);
5063 destroy_output_block (ob);
5064 }
5065
5066 /* Read replacements section in file FILE_DATA of length LEN with data
5067 DATA. */
5068
5069 static void
5070 read_replacements_section (struct lto_file_decl_data *file_data,
5071 const char *data,
5072 size_t len)
5073 {
5074 const struct lto_function_header *header =
5075 (const struct lto_function_header *) data;
5076 const int cfg_offset = sizeof (struct lto_function_header);
5077 const int main_offset = cfg_offset + header->cfg_size;
5078 const int string_offset = main_offset + header->main_size;
5079 struct data_in *data_in;
5080 unsigned int i;
5081 unsigned int count;
5082
5083 lto_input_block ib_main ((const char *) data + main_offset,
5084 header->main_size, file_data->mode_table);
5085
5086 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5087 header->string_size, vNULL);
5088 count = streamer_read_uhwi (&ib_main);
5089
5090 for (i = 0; i < count; i++)
5091 {
5092 unsigned int index;
5093 struct cgraph_node *node;
5094 lto_symtab_encoder_t encoder;
5095
5096 index = streamer_read_uhwi (&ib_main);
5097 encoder = file_data->symtab_node_encoder;
5098 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5099 index));
5100 gcc_assert (node->definition);
5101 read_ipcp_transformation_info (&ib_main, node, data_in);
5102 }
5103   lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5104 len);
5105 lto_data_in_delete (data_in);
5106 }
5107
5108 /* Read IPA-CP aggregate replacements. */
5109
5110 void
5111 ipcp_read_transformation_summaries (void)
5112 {
5113 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5114 struct lto_file_decl_data *file_data;
5115 unsigned int j = 0;
5116
5117 while ((file_data = file_data_vec[j++]))
5118 {
5119 size_t len;
5120 const char *data = lto_get_section_data (file_data,
5121 LTO_section_ipcp_transform,
5122 NULL, &len);
5123 if (data)
5124 read_replacements_section (file_data, data, len);
5125 }
5126 }
5127
5128 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5129 NODE. */
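/* For example (made-up indices): with combined_args_to_skip = {1} and
   values recorded for original parameter indices 0 and 2, index 0 stays 0
   while index 2 is remapped to 1 in the clone.  */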
5130
5131 static void
5132 adjust_agg_replacement_values (struct cgraph_node *node,
5133 struct ipa_agg_replacement_value *aggval)
5134 {
5135 struct ipa_agg_replacement_value *v;
5136 int i, c = 0, d = 0, *adj;
5137
5138 if (!node->clone.combined_args_to_skip)
5139 return;
5140
5141 for (v = aggval; v; v = v->next)
5142 {
5143 gcc_assert (v->index >= 0);
5144 if (c < v->index)
5145 c = v->index;
5146 }
5147 c++;
5148
5149 adj = XALLOCAVEC (int, c);
5150 for (i = 0; i < c; i++)
5151 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5152 {
5153 adj[i] = -1;
5154 d++;
5155 }
5156 else
5157 adj[i] = i - d;
5158
5159 for (v = aggval; v; v = v->next)
5160 v->index = adj[v->index];
5161 }
5162
5163 /* Dominator walker driving the ipcp modification phase. */
5164
5165 class ipcp_modif_dom_walker : public dom_walker
5166 {
5167 public:
5168 ipcp_modif_dom_walker (struct func_body_info *fbi,
5169 vec<ipa_param_descriptor> descs,
5170 struct ipa_agg_replacement_value *av,
5171 bool *sc, bool *cc)
5172 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5173 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5174
5175 virtual void before_dom_children (basic_block);
5176
5177 private:
5178 struct func_body_info *m_fbi;
5179 vec<ipa_param_descriptor> m_descriptors;
5180 struct ipa_agg_replacement_value *m_aggval;
5181 bool *m_something_changed, *m_cfg_changed;
5182 };
5183
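/* Rewrite in BB loads from aggregates passed in parameters for which
   IPA-CP recorded constant replacement values.  A made-up example: if
   value 4 was recorded for offset 0 of parameter 0, a load such as
   x_1 = param->f (with F at offset 0) becomes x_1 = 4.  */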
5184 void
5185 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5186 {
5187 gimple_stmt_iterator gsi;
5188 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5189 {
5190 struct ipa_agg_replacement_value *v;
5191 gimple stmt = gsi_stmt (gsi);
5192 tree rhs, val, t;
5193 HOST_WIDE_INT offset, size;
5194 int index;
5195 bool by_ref, vce;
5196
5197 if (!gimple_assign_load_p (stmt))
5198 continue;
5199 rhs = gimple_assign_rhs1 (stmt);
5200 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5201 continue;
5202
5203 vce = false;
5204 t = rhs;
5205 while (handled_component_p (t))
5206 {
5207 /* V_C_E can do things like convert an array of integers to one
5208 bigger integer and similar things we do not handle below. */
5209 	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5210 {
5211 vce = true;
5212 break;
5213 }
5214 t = TREE_OPERAND (t, 0);
5215 }
5216 if (vce)
5217 continue;
5218
5219 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5220 &offset, &size, &by_ref))
5221 continue;
5222 for (v = m_aggval; v; v = v->next)
5223 if (v->index == index
5224 && v->offset == offset)
5225 break;
5226 if (!v
5227 || v->by_ref != by_ref
5228 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5229 continue;
5230
5231 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5232 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5233 {
5234 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5235 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5236 else if (TYPE_SIZE (TREE_TYPE (rhs))
5237 == TYPE_SIZE (TREE_TYPE (v->value)))
5238 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5239 else
5240 {
5241 if (dump_file)
5242 {
5243 fprintf (dump_file, " const ");
5244 print_generic_expr (dump_file, v->value, 0);
5245 fprintf (dump_file, " can't be converted to type of ");
5246 print_generic_expr (dump_file, rhs, 0);
5247 fprintf (dump_file, "\n");
5248 }
5249 continue;
5250 }
5251 }
5252 else
5253 val = v->value;
5254
5255 if (dump_file && (dump_flags & TDF_DETAILS))
5256 {
5257 fprintf (dump_file, "Modifying stmt:\n ");
5258 print_gimple_stmt (dump_file, stmt, 0, 0);
5259 }
5260 gimple_assign_set_rhs_from_tree (&gsi, val);
5261 update_stmt (stmt);
5262
5263 if (dump_file && (dump_flags & TDF_DETAILS))
5264 {
5265 fprintf (dump_file, "into:\n ");
5266 print_gimple_stmt (dump_file, stmt, 0, 0);
5267 fprintf (dump_file, "\n");
5268 }
5269
5270 *m_something_changed = true;
5271 if (maybe_clean_eh_stmt (stmt)
5272 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5273 *m_cfg_changed = true;
5274 }
5275
5276 }
5277
5278 /* Update alignment of formal parameters as described in
5279 ipcp_transformation_summary. */
5280
5281 static void
5282 ipcp_update_alignments (struct cgraph_node *node)
5283 {
5284 tree fndecl = node->decl;
5285 tree parm = DECL_ARGUMENTS (fndecl);
5286 tree next_parm = parm;
5287 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5288 if (!ts || vec_safe_length (ts->alignments) == 0)
5289 return;
5290 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5291 unsigned count = alignments.length ();
5292
5293 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5294 {
5295 if (node->clone.combined_args_to_skip
5296 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5297 continue;
5298 gcc_checking_assert (parm);
5299 next_parm = DECL_CHAIN (parm);
5300
5301 if (!alignments[i].known || !is_gimple_reg (parm))
5302 continue;
5303 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5304 if (!ddef)
5305 continue;
5306
5307 if (dump_file)
5308 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5309 "misalignment to %u\n", i, alignments[i].align,
5310 alignments[i].misalign);
5311
5312 struct ptr_info_def *pi = get_ptr_info (ddef);
5313 gcc_checking_assert (pi);
5314 unsigned old_align;
5315 unsigned old_misalign;
5316 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5317
5318 if (old_known
5319 && old_align >= alignments[i].align)
5320 {
5321 if (dump_file)
5322 fprintf (dump_file, " But the alignment was already %u.\n",
5323 old_align);
5324 continue;
5325 }
5326 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5327 }
5328 }
5329
5330 /* IPCP transformation phase doing propagation of aggregate values. */
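/* In outline: update known parameter alignments, remap the recorded
   aggregate replacement values past any skipped parameters, walk the
   dominator tree replacing matching aggregate loads with the recorded
   constants, and release the per-function data afterwards.  */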
5331
5332 unsigned int
5333 ipcp_transform_function (struct cgraph_node *node)
5334 {
5335 vec<ipa_param_descriptor> descriptors = vNULL;
5336 struct func_body_info fbi;
5337 struct ipa_agg_replacement_value *aggval;
5338 int param_count;
5339 bool cfg_changed = false, something_changed = false;
5340
5341 gcc_checking_assert (cfun);
5342 gcc_checking_assert (current_function_decl);
5343
5344 if (dump_file)
5345 fprintf (dump_file, "Modification phase of node %s/%i\n",
5346 node->name (), node->order);
5347
5348 ipcp_update_alignments (node);
5349 aggval = ipa_get_agg_replacements_for_node (node);
5350 if (!aggval)
5351 return 0;
5352 param_count = count_formal_params (node->decl);
5353 if (param_count == 0)
5354 return 0;
5355 adjust_agg_replacement_values (node, aggval);
5356 if (dump_file)
5357 ipa_dump_agg_replacement_values (dump_file, aggval);
5358
5359 fbi.node = node;
5360 fbi.info = NULL;
5361 fbi.bb_infos = vNULL;
5362 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5363 fbi.param_count = param_count;
5364 fbi.aa_walked = 0;
5365
5366 descriptors.safe_grow_cleared (param_count);
5367 ipa_populate_param_decls (node, descriptors);
5368 calculate_dominance_info (CDI_DOMINATORS);
5369 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5370 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5371
5372 int i;
5373 struct ipa_bb_info *bi;
5374 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5375 free_ipa_bb_info (bi);
5376 fbi.bb_infos.release ();
5377 free_dominance_info (CDI_DOMINATORS);
5378 (*ipcp_transformations)[node->uid].agg_values = NULL;
5379 (*ipcp_transformations)[node->uid].alignments = NULL;
5380 descriptors.release ();
5381
5382 if (!something_changed)
5383 return 0;
5384 else if (cfg_changed)
5385 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5386 else
5387 return TODO_update_ssa_only_virtuals;
5388 }