gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "alias.h"
24 #include "symtab.h"
25 #include "options.h"
26 #include "tree.h"
27 #include "fold-const.h"
28 #include "predict.h"
29 #include "tm.h"
30 #include "hard-reg-set.h"
31 #include "function.h"
32 #include "dominance.h"
33 #include "cfg.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimple-expr.h"
40 #include "gimple.h"
41 #include "rtl.h"
42 #include "flags.h"
43 #include "insn-config.h"
44 #include "expmed.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "emit-rtl.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "stor-layout.h"
53 #include "print-tree.h"
54 #include "gimplify.h"
55 #include "gimple-iterator.h"
56 #include "gimplify-me.h"
57 #include "gimple-walk.h"
58 #include "langhooks.h"
59 #include "target.h"
60 #include "plugin-api.h"
61 #include "ipa-ref.h"
62 #include "cgraph.h"
63 #include "alloc-pool.h"
64 #include "symbol-summary.h"
65 #include "ipa-prop.h"
66 #include "bitmap.h"
67 #include "gimple-ssa.h"
68 #include "tree-cfg.h"
69 #include "tree-phinodes.h"
70 #include "ssa-iterators.h"
71 #include "tree-into-ssa.h"
72 #include "tree-dfa.h"
73 #include "tree-pass.h"
74 #include "tree-inline.h"
75 #include "ipa-inline.h"
76 #include "diagnostic.h"
77 #include "gimple-pretty-print.h"
78 #include "lto-streamer.h"
79 #include "data-streamer.h"
80 #include "tree-streamer.h"
81 #include "params.h"
82 #include "ipa-utils.h"
83 #include "stringpool.h"
84 #include "tree-ssanames.h"
85 #include "dbgcnt.h"
86 #include "domwalk.h"
87 #include "builtins.h"
88
89 /* Intermediate information that we get from alias analysis about a particular
90 parameter in a particular basic_block. When a parameter or the memory it
91 references is marked modified, we use that information in all dominated
92 blocks without consulting the alias analysis oracle. */
93
94 struct param_aa_status
95 {
96 /* Set when this structure contains meaningful information. If not, the
97 structure describing a dominating BB should be used instead. */
98 bool valid;
99
100 /* Whether we have seen something which might have modified the data in
101 question. PARM is for the parameter itself, REF is for data it points to
102 but using the alias type of individual accesses and PT is the same thing
103 but for computing aggregate pass-through functions using a very inclusive
104 ao_ref. */
105 bool parm_modified, ref_modified, pt_modified;
106 };
107
108 /* Information related to a given BB that is used only when looking at the
109 function body. */
110
111 struct ipa_bb_info
112 {
113 /* Call graph edges going out of this BB. */
114 vec<cgraph_edge *> cg_edges;
115 /* Alias analysis statuses of each formal parameter at this bb. */
116 vec<param_aa_status> param_aa_statuses;
117 };
118
119 /* Structure with global information that is only used when looking at the
120 function body. */
121
122 struct func_body_info
123 {
124 /* The node that is being analyzed. */
125 cgraph_node *node;
126
127 /* Its info. */
128 struct ipa_node_params *info;
129
130 /* Information about individual BBs. */
131 vec<ipa_bb_info> bb_infos;
132
133 /* Number of parameters. */
134 int param_count;
135
136 /* Number of statements already walked when analyzing this function. */
137 unsigned int aa_walked;
138 };
139
140 /* Function summary where the parameter infos are actually stored. */
141 ipa_node_params_t *ipa_node_params_sum = NULL;
142 /* Vector of IPA-CP transformation data for each clone. */
143 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
144 /* Vector where the edge argument infos (jump functions) are actually stored. */
145 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
146
147 /* Holders of ipa cgraph hooks: */
148 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
149 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
150 static struct cgraph_node_hook_list *function_insertion_hook_holder;
151
152 /* Description of a reference to an IPA constant. */
153 struct ipa_cst_ref_desc
154 {
155 /* Edge that corresponds to the statement which took the reference. */
156 struct cgraph_edge *cs;
157 /* Linked list of duplicates created when call graph edges are cloned. */
158 struct ipa_cst_ref_desc *next_duplicate;
159 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
160 is out of control. */
161 int refcount;
162 };
163
164 /* Allocation pool for reference descriptions. */
165
166 static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
167 ("IPA-PROP ref descriptions", 32);
168
169 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
170 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
171
172 static bool
173 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
174 {
175 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
176
177 if (!fs_opts)
178 return false;
179 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
180 }
181
182 /* Return the index of the formal parameter whose tree is PTREE, looked up in
183 the vector of parameter descriptors DESCRIPTORS; return -1 if not found. */
184
185 static int
186 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
187 {
188 int i, count;
189
190 count = descriptors.length ();
191 for (i = 0; i < count; i++)
192 if (descriptors[i].decl == ptree)
193 return i;
194
195 return -1;
196 }
197
198 /* Return the index of the formal parameter whose tree is PTREE in the
199 function which corresponds to INFO; return -1 if not found. */
200
201 int
202 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
203 {
204 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
205 }
206
207 /* Populate the decl fields in the parameter DESCRIPTORS that correspond to
208 NODE. */
209
210 static void
211 ipa_populate_param_decls (struct cgraph_node *node,
212 vec<ipa_param_descriptor> &descriptors)
213 {
214 tree fndecl;
215 tree fnargs;
216 tree parm;
217 int param_num;
218
219 fndecl = node->decl;
220 gcc_assert (gimple_has_body_p (fndecl));
221 fnargs = DECL_ARGUMENTS (fndecl);
222 param_num = 0;
223 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
224 {
225 descriptors[param_num].decl = parm;
226 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
227 true);
228 param_num++;
229 }
230 }
231
232 /* Return how many formal parameters FNDECL has. */
233
234 int
235 count_formal_params (tree fndecl)
236 {
237 tree parm;
238 int count = 0;
239 gcc_assert (gimple_has_body_p (fndecl));
240
241 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
242 count++;
243
244 return count;
245 }
246
247 /* Dump the Ith formal parameter of the function corresponding to INFO to
248 FILE. Note that the descriptors array is built just once using
249 ipa_initialize_node_params and there is no setter function. */
250
251 void
252 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
253 {
254 fprintf (file, "param #%i", i);
255 if (info->descriptors[i].decl)
256 {
257 fprintf (file, " ");
258 print_generic_expr (file, info->descriptors[i].decl, 0);
259 }
260 }
261
262 /* Initialize the ipa_node_params structure associated with NODE
263 to hold PARAM_COUNT parameters. */
264
265 void
266 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
267 {
268 struct ipa_node_params *info = IPA_NODE_REF (node);
269
270 if (!info->descriptors.exists () && param_count)
271 info->descriptors.safe_grow_cleared (param_count);
272 }
273
274 /* Initialize the ipa_node_params structure associated with NODE by counting
275 the function parameters, creating the descriptors and populating their
276 param_decls. */
277
278 void
279 ipa_initialize_node_params (struct cgraph_node *node)
280 {
281 struct ipa_node_params *info = IPA_NODE_REF (node);
282
283 if (!info->descriptors.exists ())
284 {
285 ipa_alloc_node_params (node, count_formal_params (node->decl));
286 ipa_populate_param_decls (node, info->descriptors);
287 }
288 }
289
290 /* Print the jump functions associated with call graph edge CS to file F. */
291
292 static void
293 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
294 {
295 int i, count;
296
297 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
298 for (i = 0; i < count; i++)
299 {
300 struct ipa_jump_func *jump_func;
301 enum jump_func_type type;
302
303 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
304 type = jump_func->type;
305
306 fprintf (f, " param %d: ", i);
307 if (type == IPA_JF_UNKNOWN)
308 fprintf (f, "UNKNOWN\n");
309 else if (type == IPA_JF_CONST)
310 {
311 tree val = jump_func->value.constant.value;
312 fprintf (f, "CONST: ");
313 print_generic_expr (f, val, 0);
314 if (TREE_CODE (val) == ADDR_EXPR
315 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
316 {
317 fprintf (f, " -> ");
318 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
319 0);
320 }
321 fprintf (f, "\n");
322 }
323 else if (type == IPA_JF_PASS_THROUGH)
324 {
325 fprintf (f, "PASS THROUGH: ");
326 fprintf (f, "%d, op %s",
327 jump_func->value.pass_through.formal_id,
328 get_tree_code_name(jump_func->value.pass_through.operation));
329 if (jump_func->value.pass_through.operation != NOP_EXPR)
330 {
331 fprintf (f, " ");
332 print_generic_expr (f,
333 jump_func->value.pass_through.operand, 0);
334 }
335 if (jump_func->value.pass_through.agg_preserved)
336 fprintf (f, ", agg_preserved");
337 fprintf (f, "\n");
338 }
339 else if (type == IPA_JF_ANCESTOR)
340 {
341 fprintf (f, "ANCESTOR: ");
342 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
343 jump_func->value.ancestor.formal_id,
344 jump_func->value.ancestor.offset);
345 if (jump_func->value.ancestor.agg_preserved)
346 fprintf (f, ", agg_preserved");
347 fprintf (f, "\n");
348 }
349
350 if (jump_func->agg.items)
351 {
352 struct ipa_agg_jf_item *item;
353 int j;
354
355 fprintf (f, " Aggregate passed by %s:\n",
356 jump_func->agg.by_ref ? "reference" : "value");
357 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
358 {
359 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
360 item->offset);
361 if (TYPE_P (item->value))
362 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
363 tree_to_uhwi (TYPE_SIZE (item->value)));
364 else
365 {
366 fprintf (f, "cst: ");
367 print_generic_expr (f, item->value, 0);
368 }
369 fprintf (f, "\n");
370 }
371 }
372
373 struct ipa_polymorphic_call_context *ctx
374 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
375 if (ctx && !ctx->useless_p ())
376 {
377 fprintf (f, " Context: ");
378 ctx->dump (f);
379 }
380
381 if (jump_func->alignment.known)
382 {
383 fprintf (f, " Alignment: %u, misalignment: %u\n",
384 jump_func->alignment.align,
385 jump_func->alignment.misalign);
386 }
387 else
388 fprintf (f, " Unknown alignment\n");
389 }
390 }
391
392
393 /* Print the jump functions of all arguments on all call graph edges going from
394 NODE to file F. */
395
396 void
397 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
398 {
399 struct cgraph_edge *cs;
400
401 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
402 node->order);
403 for (cs = node->callees; cs; cs = cs->next_callee)
404 {
405 if (!ipa_edge_args_info_available_for_edge_p (cs))
406 continue;
407
408 fprintf (f, " callsite %s/%i -> %s/%i : \n",
409 xstrdup_for_dump (node->name ()), node->order,
410 xstrdup_for_dump (cs->callee->name ()),
411 cs->callee->order);
412 ipa_print_node_jump_functions_for_edge (f, cs);
413 }
414
415 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
416 {
417 struct cgraph_indirect_call_info *ii;
418 if (!ipa_edge_args_info_available_for_edge_p (cs))
419 continue;
420
421 ii = cs->indirect_info;
422 if (ii->agg_contents)
423 fprintf (f, " indirect %s callsite, calling param %i, "
424 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
425 ii->member_ptr ? "member ptr" : "aggregate",
426 ii->param_index, ii->offset,
427 ii->by_ref ? "by reference" : "by value");
428 else
429 fprintf (f, " indirect %s callsite, calling param %i, "
430 "offset " HOST_WIDE_INT_PRINT_DEC,
431 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
432 ii->offset);
433
434 if (cs->call_stmt)
435 {
436 fprintf (f, ", for stmt ");
437 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
438 }
439 else
440 fprintf (f, "\n");
441 if (ii->polymorphic)
442 ii->context.dump (f);
443 ipa_print_node_jump_functions_for_edge (f, cs);
444 }
445 }
446
447 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
448
449 void
450 ipa_print_all_jump_functions (FILE *f)
451 {
452 struct cgraph_node *node;
453
454 fprintf (f, "\nJump functions:\n");
455 FOR_EACH_FUNCTION (node)
456 {
457 ipa_print_node_jump_functions (f, node);
458 }
459 }
460
461 /* Set JFUNC to be an unknown jump function, one that carries no information. */
462
463 static void
464 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
465 {
466 jfunc->type = IPA_JF_UNKNOWN;
467 jfunc->alignment.known = false;
468 }
469
470 /* Set DST to be a copy of the constant jump function SRC (to be used by the
471 jump function combination code). The two functions will share their rdesc. */
472
473 static void
474 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
475 struct ipa_jump_func *src)
476
477 {
478 gcc_checking_assert (src->type == IPA_JF_CONST);
479 dst->type = IPA_JF_CONST;
480 dst->value.constant = src->value.constant;
481 }
482
483 /* Set JFUNC to be a constant jump function. */
484
485 static void
486 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
487 struct cgraph_edge *cs)
488 {
489 constant = unshare_expr (constant);
490 if (constant && EXPR_P (constant))
491 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
492 jfunc->type = IPA_JF_CONST;
493 jfunc->value.constant.value = unshare_expr_without_location (constant);
494
495 if (TREE_CODE (constant) == ADDR_EXPR
496 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
497 {
498 struct ipa_cst_ref_desc *rdesc;
499
500 rdesc = ipa_refdesc_pool.allocate ();
501 rdesc->cs = cs;
502 rdesc->next_duplicate = NULL;
503 rdesc->refcount = 1;
504 jfunc->value.constant.rdesc = rdesc;
505 }
506 else
507 jfunc->value.constant.rdesc = NULL;
508 }
509
510 /* Set JFUNC to be a simple pass-through jump function. */
511 static void
512 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
513 bool agg_preserved)
514 {
515 jfunc->type = IPA_JF_PASS_THROUGH;
516 jfunc->value.pass_through.operand = NULL_TREE;
517 jfunc->value.pass_through.formal_id = formal_id;
518 jfunc->value.pass_through.operation = NOP_EXPR;
519 jfunc->value.pass_through.agg_preserved = agg_preserved;
520 }
521
522 /* Set JFUNC to be an arithmetic pass-through jump function. */
523
524 static void
525 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
526 tree operand, enum tree_code operation)
527 {
528 jfunc->type = IPA_JF_PASS_THROUGH;
529 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
530 jfunc->value.pass_through.formal_id = formal_id;
531 jfunc->value.pass_through.operation = operation;
532 jfunc->value.pass_through.agg_preserved = false;
533 }
534
535 /* Set JFUNC to be an ancestor jump function. */
536
537 static void
538 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
539 int formal_id, bool agg_preserved)
540 {
541 jfunc->type = IPA_JF_ANCESTOR;
542 jfunc->value.ancestor.formal_id = formal_id;
543 jfunc->value.ancestor.offset = offset;
544 jfunc->value.ancestor.agg_preserved = agg_preserved;
545 }
546
547 /* Get IPA BB information about the given BB. FBI is the context of the
548 analysis of this function body. */
549
550 static struct ipa_bb_info *
551 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
552 {
553 gcc_checking_assert (fbi);
554 return &fbi->bb_infos[bb->index];
555 }
556
557 /* Structure to be passed in between detect_type_change and
558 check_stmt_for_type_change. */
559
560 struct prop_type_change_info
561 {
562 /* Offset into the object at which the virtual method pointer we are
563 looking for resides. */
564 HOST_WIDE_INT offset;
565 /* The declaration or SSA_NAME pointer of the base that we are checking for
566 type change. */
567 tree object;
568 /* Set to true if dynamic type change has been detected. */
569 bool type_maybe_changed;
570 };
571
572 /* Return true if STMT can modify a virtual method table pointer.
573
574 This function makes special assumptions about both constructors and
575 destructors which are all the functions that are allowed to alter the VMT
576 pointers. It assumes that destructors begin with assignment into all VMT
577 pointers and that constructors essentially look in the following way:
578
579 1) The very first thing they do is that they call constructors of ancestor
580 sub-objects that have them.
581
582 2) Then the VMT pointers of this object and of all its ancestors are set to
583 new values corresponding to the type of the constructor.
584
585 3) Only afterwards, other stuff such as constructors of member sub-objects
586 and the code written by the user is run. Only this part may include calling
587 virtual functions, directly or indirectly.
588
589 There is no way to call a constructor of an ancestor sub-object in any
590 other way.
591
592 This means that we do not have to care whether constructors get the correct
593 type information because they will always change it (in fact, if we define
594 the type to be given by the VMT pointer, it is undefined).
595
596 The most important fact to derive from the above is that if, for some
597 statement in section 3, we try to detect whether the dynamic type has
598 changed, we can safely ignore all calls as we examine the function body
599 backwards until we reach statements in section 2, because these calls cannot
600 be ancestor constructors or destructors (if the input is not bogus) and so
601 do not change the dynamic type (this holds true only for automatically
602 allocated objects, but at the moment we devirtualize only these). We then
603 must detect that statements in section 2 change the dynamic type and can try
604 to derive the new type. That is enough and we can stop; we will never see
605 the calls into constructors of sub-objects in this code. Therefore we can
606 safely ignore all call statements that we traverse.
607 */
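
/* For illustration only, a hypothetical constructor following the pattern
   described above (an assumed example, not taken from any testcase):

     struct A { virtual void f (); int i; };
     struct B : A { virtual void f (); };

     B::B () : A ()   // 1) ancestor constructors run first
     {
       // 2) the compiler stores &vtable-of-B into this->vptr here
       this->i = 0;   // 3) only then does user-written code run
     }

   When looking at a statement from section 3, the backward walk can
   therefore skip over all call statements until it reaches the VMT pointer
   stores of section 2. */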
608
609 static bool
610 stmt_may_be_vtbl_ptr_store (gimple stmt)
611 {
612 if (is_gimple_call (stmt))
613 return false;
614 if (gimple_clobber_p (stmt))
615 return false;
616 else if (is_gimple_assign (stmt))
617 {
618 tree lhs = gimple_assign_lhs (stmt);
619
620 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
621 {
622 if (flag_strict_aliasing
623 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
624 return false;
625
626 if (TREE_CODE (lhs) == COMPONENT_REF
627 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
628 return false;
629 /* In the future we might want to use get_base_ref_and_offset to find
630 if there is a field corresponding to the offset and if so, proceed
631 almost as if it were a component ref. */
632 }
633 }
634 return true;
635 }
636
637 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
638 to check whether a particular statement may modify the virtual table
639 pointer. It stores its result into DATA, which points to a
640 prop_type_change_info structure. */
641
642 static bool
643 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
644 {
645 gimple stmt = SSA_NAME_DEF_STMT (vdef);
646 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
647
648 if (stmt_may_be_vtbl_ptr_store (stmt))
649 {
650 tci->type_maybe_changed = true;
651 return true;
652 }
653 else
654 return false;
655 }
656
657 /* See if ARG is a PARM_DECL describing an instance passed by pointer
658 or reference in FUNCTION. Return true if the dynamic type may change
659 between the beginning of the function and the point where CALL is invoked.
660
661 Generally, functions are not allowed to change the type of such instances,
662 but they may call destructors. We assume that methods cannot destroy the
663 THIS pointer. Also, as a special case, constructors and destructors may
664 change the type of the THIS pointer. */
665
666 static bool
667 param_type_may_change_p (tree function, tree arg, gimple call)
668 {
669 /* Pure and const functions cannot change the dynamic type;
670 that requires writing to memory. */
671 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
672 return false;
673 /* We need to check if we are within an inlined constructor
674 or destructor (ideally we would have a way to check that the
675 inlined cdtor is actually working on ARG, but we don't have
676 an easy way to tie this down, so punt on all non-pure cdtors.
677 We may also record the types of cdtors and, once we know the type
678 of the instance, match them.
679
680 Also, code unification optimizations may merge calls from
681 different blocks, making return values unreliable. So
682 do nothing during late optimization. */
683 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
684 return true;
685 if (TREE_CODE (arg) == SSA_NAME
686 && SSA_NAME_IS_DEFAULT_DEF (arg)
687 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
688 {
689 /* Normal (non-THIS) argument. */
690 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
691 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
692 /* THIS pointer of a method - here we want to watch constructors
693 and destructors as those definitely may change the dynamic
694 type. */
695 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
696 && !DECL_CXX_CONSTRUCTOR_P (function)
697 && !DECL_CXX_DESTRUCTOR_P (function)
698 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
699 {
700 /* Walk the inline stack and watch out for ctors/dtors. */
701 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
702 block = BLOCK_SUPERCONTEXT (block))
703 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
704 return true;
705 return false;
706 }
707 }
708 return true;
709 }
710
711 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
712 callsite CALL) by looking for assignments to its virtual table pointer. If
713 it has, return true and fill in the jump function JFUNC with relevant type
714 information or set it to unknown. ARG is the object itself (not a pointer
715 to it, unless dereferenced). BASE is the base of the memory access as
716 returned by get_ref_base_and_extent, as is the offset.
717
718 This is a helper function for detect_type_change and detect_type_change_ssa
719 that does the heavy work, which is usually unnecessary. */
720
721 static bool
722 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
723 gcall *call, struct ipa_jump_func *jfunc,
724 HOST_WIDE_INT offset)
725 {
726 struct prop_type_change_info tci;
727 ao_ref ao;
728 bool entry_reached = false;
729
730 gcc_checking_assert (DECL_P (arg)
731 || TREE_CODE (arg) == MEM_REF
732 || handled_component_p (arg));
733
734 comp_type = TYPE_MAIN_VARIANT (comp_type);
735
736 /* Const calls cannot call virtual methods through VMT and so type changes do
737 not matter. */
738 if (!flag_devirtualize || !gimple_vuse (call)
739 /* Be sure expected_type is polymorphic. */
740 || !comp_type
741 || TREE_CODE (comp_type) != RECORD_TYPE
742 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
743 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
744 return true;
745
746 ao_ref_init (&ao, arg);
747 ao.base = base;
748 ao.offset = offset;
749 ao.size = POINTER_SIZE;
750 ao.max_size = ao.size;
751
752 tci.offset = offset;
753 tci.object = get_base_address (arg);
754 tci.type_maybe_changed = false;
755
756 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
757 &tci, NULL, &entry_reached);
758 if (!tci.type_maybe_changed)
759 return false;
760
761 ipa_set_jf_unknown (jfunc);
762 return true;
763 }
764
765 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
766 If so, return true and fill in the jump function JFUNC with relevant type
767 information or set it to unknown. ARG is the object itself (not a pointer
768 to it, unless dereferenced). BASE is the base of the memory access as
769 returned by get_ref_base_and_extent, as is the offset. */
770
771 static bool
772 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
773 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
774 {
775 if (!flag_devirtualize)
776 return false;
777
778 if (TREE_CODE (base) == MEM_REF
779 && !param_type_may_change_p (current_function_decl,
780 TREE_OPERAND (base, 0),
781 call))
782 return false;
783 return detect_type_change_from_memory_writes (arg, base, comp_type,
784 call, jfunc, offset);
785 }
786
787 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
788 SSA name (its dereference will become the base and the offset is assumed to
789 be zero). */
790
791 static bool
792 detect_type_change_ssa (tree arg, tree comp_type,
793 gcall *call, struct ipa_jump_func *jfunc)
794 {
795 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
796 if (!flag_devirtualize
797 || !POINTER_TYPE_P (TREE_TYPE (arg)))
798 return false;
799
800 if (!param_type_may_change_p (current_function_decl, arg, call))
801 return false;
802
803 arg = build2 (MEM_REF, ptr_type_node, arg,
804 build_int_cst (ptr_type_node, 0));
805
806 return detect_type_change_from_memory_writes (arg, arg, comp_type,
807 call, jfunc, 0);
808 }
809
810 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
811 the boolean variable pointed to by DATA. */
812
813 static bool
814 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
815 void *data)
816 {
817 bool *b = (bool *) data;
818 *b = true;
819 return true;
820 }
821
822 /* Return true if we have already walked so many statements in AA that we
823 should really just give up. */
824
825 static bool
826 aa_overwalked (struct func_body_info *fbi)
827 {
828 gcc_checking_assert (fbi);
829 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
830 }
831
832 /* Find the nearest valid aa status for parameter specified by INDEX that
833 dominates BB. */
834
835 static struct param_aa_status *
836 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
837 int index)
838 {
839 while (true)
840 {
841 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
842 if (!bb)
843 return NULL;
844 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
845 if (!bi->param_aa_statuses.is_empty ()
846 && bi->param_aa_statuses[index].valid)
847 return &bi->param_aa_statuses[index];
848 }
849 }
850
851 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
852 structures and/or initialize the result with a dominating description as
853 necessary. */
854
855 static struct param_aa_status *
856 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
857 int index)
858 {
859 gcc_checking_assert (fbi);
860 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
861 if (bi->param_aa_statuses.is_empty ())
862 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
863 struct param_aa_status *paa = &bi->param_aa_statuses[index];
864 if (!paa->valid)
865 {
866 gcc_checking_assert (!paa->parm_modified
867 && !paa->ref_modified
868 && !paa->pt_modified);
869 struct param_aa_status *dom_paa;
870 dom_paa = find_dominating_aa_status (fbi, bb, index);
871 if (dom_paa)
872 *paa = *dom_paa;
873 else
874 paa->valid = true;
875 }
876
877 return paa;
878 }
879
880 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
881 a value known not to be modified in this function before reaching the
882 statement STMT. FBI holds the information about the function that we have
883 gathered so far but that does not survive the summary building stage. */
884
885 static bool
886 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
887 gimple stmt, tree parm_load)
888 {
889 struct param_aa_status *paa;
890 bool modified = false;
891 ao_ref refd;
892
893 /* FIXME: FBI can be NULL if we are being called from outside
894 ipa_node_analysis or ipcp_transform_function, which currently happens
895 during inlining analysis. It would be great to extend fbi's lifetime and
896 always have it. Currently, we are just not afraid of too much walking in
897 that case. */
898 if (fbi)
899 {
900 if (aa_overwalked (fbi))
901 return false;
902 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
903 if (paa->parm_modified)
904 return false;
905 }
906 else
907 paa = NULL;
908
909 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
910 ao_ref_init (&refd, parm_load);
911 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
912 &modified, NULL);
913 if (fbi)
914 fbi->aa_walked += walked;
915 if (paa && modified)
916 paa->parm_modified = true;
917 return !modified;
918 }
919
920 /* If STMT is an assignment that loads a value from a parameter declaration,
921 return the index of the parameter in ipa_node_params, provided the parameter
922 has not been modified. Otherwise return -1. */
923
924 static int
925 load_from_unmodified_param (struct func_body_info *fbi,
926 vec<ipa_param_descriptor> descriptors,
927 gimple stmt)
928 {
929 int index;
930 tree op1;
931
932 if (!gimple_assign_single_p (stmt))
933 return -1;
934
935 op1 = gimple_assign_rhs1 (stmt);
936 if (TREE_CODE (op1) != PARM_DECL)
937 return -1;
938
939 index = ipa_get_param_decl_index_1 (descriptors, op1);
940 if (index < 0
941 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
942 return -1;
943
944 return index;
945 }
946
947 /* Return true if memory reference REF (which must be a load through parameter
948 with INDEX) loads data that are known to be unmodified in this function
949 before reaching statement STMT. */
950
951 static bool
952 parm_ref_data_preserved_p (struct func_body_info *fbi,
953 int index, gimple stmt, tree ref)
954 {
955 struct param_aa_status *paa;
956 bool modified = false;
957 ao_ref refd;
958
959 /* FIXME: FBI can be NULL if we are being called from outside
960 ipa_node_analysis or ipcp_transform_function, which currently happens
961 during inlining analysis. It would be great to extend fbi's lifetime and
962 always have it. Currently, we are just not afraid of too much walking in
963 that case. */
964 if (fbi)
965 {
966 if (aa_overwalked (fbi))
967 return false;
968 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
969 if (paa->ref_modified)
970 return false;
971 }
972 else
973 paa = NULL;
974
975 gcc_checking_assert (gimple_vuse (stmt));
976 ao_ref_init (&refd, ref);
977 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
978 &modified, NULL);
979 if (fbi)
980 fbi->aa_walked += walked;
981 if (paa && modified)
982 paa->ref_modified = true;
983 return !modified;
984 }
985
986 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
987 is known to be unmodified in this function before reaching call statement
988 CALL into which it is passed. FBI describes the function body. */
989
990 static bool
991 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
992 gimple call, tree parm)
993 {
994 bool modified = false;
995 ao_ref refd;
996
997 /* It's unnecessary to calculate anything about memory contents for a const
998 function because it is not going to use it. But do not cache the result
999 either. Also, no such calculations for non-pointers. */
1000 if (!gimple_vuse (call)
1001 || !POINTER_TYPE_P (TREE_TYPE (parm))
1002 || aa_overwalked (fbi))
1003 return false;
1004
1005 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
1006 index);
1007 if (paa->pt_modified)
1008 return false;
1009
1010 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1011 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1012 &modified, NULL);
1013 fbi->aa_walked += walked;
1014 if (modified)
1015 paa->pt_modified = true;
1016 return !modified;
1017 }
1018
1019 /* Return true if we can prove that OP is a memory reference loading unmodified
1020 data from an aggregate passed as a parameter and if the aggregate is passed
1021 by reference, that the alias type of the load corresponds to the type of the
1022 formal parameter (so that we can rely on this type for TBAA in callers).
1023 FBI and DESCRIPTORS describe parameters of the current function (FBI may be
1024 NULL), STMT is the load statement. If the function returns true,
1025 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, the
1026 offset within the aggregate and whether it is a load from a value passed by
1027 reference, respectively. */
1028
1029 static bool
1030 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1031 vec<ipa_param_descriptor> descriptors,
1032 gimple stmt, tree op, int *index_p,
1033 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1034 bool *by_ref_p)
1035 {
1036 int index;
1037 HOST_WIDE_INT size, max_size;
1038 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1039
1040 if (max_size == -1 || max_size != size || *offset_p < 0)
1041 return false;
1042
1043 if (DECL_P (base))
1044 {
1045 int index = ipa_get_param_decl_index_1 (descriptors, base);
1046 if (index >= 0
1047 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1048 {
1049 *index_p = index;
1050 *by_ref_p = false;
1051 if (size_p)
1052 *size_p = size;
1053 return true;
1054 }
1055 return false;
1056 }
1057
1058 if (TREE_CODE (base) != MEM_REF
1059 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1060 || !integer_zerop (TREE_OPERAND (base, 1)))
1061 return false;
1062
1063 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1064 {
1065 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1066 index = ipa_get_param_decl_index_1 (descriptors, parm);
1067 }
1068 else
1069 {
1070 /* This branch catches situations where a pointer parameter is not a
1071 gimple register, for example:
1072
1073 void hip7(S*) (struct S * p)
1074 {
1075 void (*<T2e4>) (struct S *) D.1867;
1076 struct S * p.1;
1077
1078 <bb 2>:
1079 p.1_1 = p;
1080 D.1867_2 = p.1_1->f;
1081 D.1867_2 ();
1082 gdp = &p;
1083 */
1084
1085 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1086 index = load_from_unmodified_param (fbi, descriptors, def);
1087 }
1088
1089 if (index >= 0
1090 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1091 {
1092 *index_p = index;
1093 *by_ref_p = true;
1094 if (size_p)
1095 *size_p = size;
1096 return true;
1097 }
1098 return false;
1099 }
1100
1101 /* Just like the previous function, but without the func_body_info pointer,
1102 for users outside of this file. */
1103
1104 bool
1105 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1106 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1107 bool *by_ref_p)
1108 {
1109 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1110 offset_p, NULL, by_ref_p);
1111 }
1112
1113 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1114 of an assignment statement STMT, try to determine whether we are actually
1115 handling any of the following cases and construct an appropriate jump
1116 function into JFUNC if so:
1117
1118 1) The passed value is loaded from a formal parameter which is not a gimple
1119 register (most probably because it is addressable, the value has to be
1120 scalar) and we can guarantee the value has not changed. This case can
1121 therefore be described by a simple pass-through jump function. For example:
1122
1123 foo (int a)
1124 {
1125 int a.0;
1126
1127 a.0_2 = a;
1128 bar (a.0_2);
1129
1130 2) The passed value can be described by a simple arithmetic pass-through
1131 jump function. E.g.
1132
1133 foo (int a)
1134 {
1135 int D.2064;
1136
1137 D.2064_4 = a.1(D) + 4;
1138 bar (D.2064_4);
1139
1140 This case can also occur in combination with the previous one, e.g.:
1141
1142 foo (int a, int z)
1143 {
1144 int a.0;
1145 int D.2064;
1146
1147 a.0_3 = a;
1148 D.2064_4 = a.0_3 + 4;
1149 foo (D.2064_4);
1150
1151 3) The passed value is an address of an object within another one (which is
1152 also passed by reference). Such situations are described by an ancestor
1153 jump function, e.g.:
1154
1155 B::foo() (struct B * const this)
1156 {
1157 struct A * D.1845;
1158
1159 D.1845_2 = &this_1(D)->D.1748;
1160 A::bar (D.1845_2);
1161
1162 INFO is the structure describing individual parameters as accessed in
1163 different stages of IPA optimizations. FBI contains the information that is
1164 only needed for intraprocedural analysis. */
1165
1166 static void
1167 compute_complex_assign_jump_func (struct func_body_info *fbi,
1168 struct ipa_node_params *info,
1169 struct ipa_jump_func *jfunc,
1170 gcall *call, gimple stmt, tree name,
1171 tree param_type)
1172 {
1173 HOST_WIDE_INT offset, size, max_size;
1174 tree op1, tc_ssa, base, ssa;
1175 int index;
1176
1177 op1 = gimple_assign_rhs1 (stmt);
1178
1179 if (TREE_CODE (op1) == SSA_NAME)
1180 {
1181 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1182 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1183 else
1184 index = load_from_unmodified_param (fbi, info->descriptors,
1185 SSA_NAME_DEF_STMT (op1));
1186 tc_ssa = op1;
1187 }
1188 else
1189 {
1190 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1191 tc_ssa = gimple_assign_lhs (stmt);
1192 }
1193
1194 if (index >= 0)
1195 {
1196 tree op2 = gimple_assign_rhs2 (stmt);
1197
1198 if (op2)
1199 {
1200 if (!is_gimple_ip_invariant (op2)
1201 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1202 && !useless_type_conversion_p (TREE_TYPE (name),
1203 TREE_TYPE (op1))))
1204 return;
1205
1206 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1207 gimple_assign_rhs_code (stmt));
1208 }
1209 else if (gimple_assign_single_p (stmt))
1210 {
1211 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1212 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1213 }
1214 return;
1215 }
1216
1217 if (TREE_CODE (op1) != ADDR_EXPR)
1218 return;
1219 op1 = TREE_OPERAND (op1, 0);
1220 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1221 return;
1222 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1223 if (TREE_CODE (base) != MEM_REF
1224 /* If this is a varying address, punt. */
1225 || max_size == -1
1226 || max_size != size)
1227 return;
1228 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1229 ssa = TREE_OPERAND (base, 0);
1230 if (TREE_CODE (ssa) != SSA_NAME
1231 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1232 || offset < 0)
1233 return;
1234
1235 /* Dynamic types are changed in constructors and destructors. */
1236 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1237 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1238 ipa_set_ancestor_jf (jfunc, offset, index,
1239 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1240 }
1241
1242 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1243 it looks like:
1244
1245 iftmp.1_3 = &obj_2(D)->D.1762;
1246
1247 The base of the MEM_REF must be a default definition SSA NAME of a
1248 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1249 whole MEM_REF expression is returned and the offset calculated from any
1250 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1251 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1252
1253 static tree
1254 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1255 {
1256 HOST_WIDE_INT size, max_size;
1257 tree expr, parm, obj;
1258
1259 if (!gimple_assign_single_p (assign))
1260 return NULL_TREE;
1261 expr = gimple_assign_rhs1 (assign);
1262
1263 if (TREE_CODE (expr) != ADDR_EXPR)
1264 return NULL_TREE;
1265 expr = TREE_OPERAND (expr, 0);
1266 obj = expr;
1267 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1268
1269 if (TREE_CODE (expr) != MEM_REF
1270 /* If this is a varying address, punt. */
1271 || max_size == -1
1272 || max_size != size
1273 || *offset < 0)
1274 return NULL_TREE;
1275 parm = TREE_OPERAND (expr, 0);
1276 if (TREE_CODE (parm) != SSA_NAME
1277 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1278 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1279 return NULL_TREE;
1280
1281 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1282 *obj_p = obj;
1283 return expr;
1284 }
1285
1286
1287 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1288 statement PHI, try to find out whether NAME is in fact a
1289 multiple-inheritance typecast from a descendant into an ancestor of a formal
1290 parameter and thus can be described by an ancestor jump function and if so,
1291 write the appropriate function into JFUNC.
1292
1293 Essentially we want to match the following pattern:
1294
1295 if (obj_2(D) != 0B)
1296 goto <bb 3>;
1297 else
1298 goto <bb 4>;
1299
1300 <bb 3>:
1301 iftmp.1_3 = &obj_2(D)->D.1762;
1302
1303 <bb 4>:
1304 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1305 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1306 return D.1879_6; */
1307
1308 static void
1309 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1310 struct ipa_node_params *info,
1311 struct ipa_jump_func *jfunc,
1312 gcall *call, gphi *phi)
1313 {
1314 HOST_WIDE_INT offset;
1315 gimple assign, cond;
1316 basic_block phi_bb, assign_bb, cond_bb;
1317 tree tmp, parm, expr, obj;
1318 int index, i;
1319
1320 if (gimple_phi_num_args (phi) != 2)
1321 return;
1322
1323 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1324 tmp = PHI_ARG_DEF (phi, 0);
1325 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1326 tmp = PHI_ARG_DEF (phi, 1);
1327 else
1328 return;
1329 if (TREE_CODE (tmp) != SSA_NAME
1330 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1331 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1332 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1333 return;
1334
1335 assign = SSA_NAME_DEF_STMT (tmp);
1336 assign_bb = gimple_bb (assign);
1337 if (!single_pred_p (assign_bb))
1338 return;
1339 expr = get_ancestor_addr_info (assign, &obj, &offset);
1340 if (!expr)
1341 return;
1342 parm = TREE_OPERAND (expr, 0);
1343 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1344 if (index < 0)
1345 return;
1346
1347 cond_bb = single_pred (assign_bb);
1348 cond = last_stmt (cond_bb);
1349 if (!cond
1350 || gimple_code (cond) != GIMPLE_COND
1351 || gimple_cond_code (cond) != NE_EXPR
1352 || gimple_cond_lhs (cond) != parm
1353 || !integer_zerop (gimple_cond_rhs (cond)))
1354 return;
1355
1356 phi_bb = gimple_bb (phi);
1357 for (i = 0; i < 2; i++)
1358 {
1359 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1360 if (pred != assign_bb && pred != cond_bb)
1361 return;
1362 }
1363
1364 ipa_set_ancestor_jf (jfunc, offset, index,
1365 parm_ref_data_pass_through_p (fbi, index, call, parm));
1366 }
1367
1368 /* Inspect the given TYPE and return true iff it has the same structure (the
1369 same number of fields of the same types) as a C++ member pointer. If
1370 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1371 corresponding fields there. */
1372
1373 static bool
1374 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1375 {
1376 tree fld;
1377
1378 if (TREE_CODE (type) != RECORD_TYPE)
1379 return false;
1380
1381 fld = TYPE_FIELDS (type);
1382 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1383 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1384 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1385 return false;
1386
1387 if (method_ptr)
1388 *method_ptr = fld;
1389
1390 fld = DECL_CHAIN (fld);
1391 if (!fld || INTEGRAL_TYPE_P (fld)
1392 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1393 return false;
1394 if (delta)
1395 *delta = fld;
1396
1397 if (DECL_CHAIN (fld))
1398 return false;
1399
1400 return true;
1401 }
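
/* For illustration, a sketch of the kind of RECORD_TYPE the function above
   accepts, assuming the usual lowering of C++ pointers to member functions
   (a hypothetical example, not lifted from any front end):

     struct
     {
       void (*__pfn) ();   // pointer to the method (or a vtable offset)
       long __delta;       // adjustment to apply to the THIS pointer
     };  */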
1402
1403 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1404 return the rhs of its defining statement. Otherwise return RHS as it
1405 is. */
1406
1407 static inline tree
1408 get_ssa_def_if_simple_copy (tree rhs)
1409 {
1410 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1411 {
1412 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1413
1414 if (gimple_assign_single_p (def_stmt))
1415 rhs = gimple_assign_rhs1 (def_stmt);
1416 else
1417 break;
1418 }
1419 return rhs;
1420 }
1421
1422 /* Simple linked list, describing known contents of an aggregate before a
1423 call. */
1424
1425 struct ipa_known_agg_contents_list
1426 {
1427 /* Offset and size of the described part of the aggregate. */
1428 HOST_WIDE_INT offset, size;
1429 /* Known constant value or NULL if the contents are known to be unknown. */
1430 tree constant;
1431 /* Pointer to the next structure in the list. */
1432 struct ipa_known_agg_contents_list *next;
1433 };
1434
1435 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1436 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
1437 Return NULL if there is a partial overlap; if such an element is already
1438 there, set *ALREADY_THERE to true. */
1439
1440 static struct ipa_known_agg_contents_list **
1441 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1442 HOST_WIDE_INT lhs_offset,
1443 HOST_WIDE_INT lhs_size,
1444 bool *already_there)
1445 {
1446 struct ipa_known_agg_contents_list **p = list;
1447 while (*p && (*p)->offset < lhs_offset)
1448 {
1449 if ((*p)->offset + (*p)->size > lhs_offset)
1450 return NULL;
1451 p = &(*p)->next;
1452 }
1453
1454 if (*p && (*p)->offset < lhs_offset + lhs_size)
1455 {
1456 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1457 /* We already know this value is subsequently overwritten with
1458 something else. */
1459 *already_there = true;
1460 else
1461 /* Otherwise this is a partial overlap which we cannot
1462 represent. */
1463 return NULL;
1464 }
1465 return p;
1466 }
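
/* For example (hypothetical numbers, not from any testcase): with a list
   already describing bits [0, 32) and [64, 96),

     LHS_OFFSET 64, LHS_SIZE 32 -> *ALREADY_THERE is set to true,
     LHS_OFFSET 32, LHS_SIZE 32 -> the place between the two nodes is
       returned,
     LHS_OFFSET 16, LHS_SIZE 32 -> NULL, a partial overlap with [0, 32). */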
1467
1468 /* Build an aggregate jump function from LIST, assuming there are exactly
1469 CONST_COUNT constant entries there and that the offset of the passed argument
1470 is ARG_OFFSET, and store it into JFUNC. */
1471
1472 static void
1473 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1474 int const_count, HOST_WIDE_INT arg_offset,
1475 struct ipa_jump_func *jfunc)
1476 {
1477 vec_alloc (jfunc->agg.items, const_count);
1478 while (list)
1479 {
1480 if (list->constant)
1481 {
1482 struct ipa_agg_jf_item item;
1483 item.offset = list->offset - arg_offset;
1484 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1485 item.value = unshare_expr_without_location (list->constant);
1486 jfunc->agg.items->quick_push (item);
1487 }
1488 list = list->next;
1489 }
1490 }
1491
1492 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1493 in ARG is filled in with constant values. ARG can either be an aggregate
1494 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1495 aggregate. JFUNC is the jump function into which the constants are
1496 subsequently stored. */
1497
1498 static void
1499 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1500 tree arg_type,
1501 struct ipa_jump_func *jfunc)
1502 {
1503 struct ipa_known_agg_contents_list *list = NULL;
1504 int item_count = 0, const_count = 0;
1505 HOST_WIDE_INT arg_offset, arg_size;
1506 gimple_stmt_iterator gsi;
1507 tree arg_base;
1508 bool check_ref, by_ref;
1509 ao_ref r;
1510
1511 /* The function operates in three stages. First, we prepare check_ref, r,
1512 arg_base and arg_offset based on the form of the actual argument that is
1513 passed. */
1514
1515 if (POINTER_TYPE_P (arg_type))
1516 {
1517 by_ref = true;
1518 if (TREE_CODE (arg) == SSA_NAME)
1519 {
1520 tree type_size;
1521 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1522 return;
1523 check_ref = true;
1524 arg_base = arg;
1525 arg_offset = 0;
1526 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1527 arg_size = tree_to_uhwi (type_size);
1528 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1529 }
1530 else if (TREE_CODE (arg) == ADDR_EXPR)
1531 {
1532 HOST_WIDE_INT arg_max_size;
1533
1534 arg = TREE_OPERAND (arg, 0);
1535 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1536 &arg_max_size);
1537 if (arg_max_size == -1
1538 || arg_max_size != arg_size
1539 || arg_offset < 0)
1540 return;
1541 if (DECL_P (arg_base))
1542 {
1543 check_ref = false;
1544 ao_ref_init (&r, arg_base);
1545 }
1546 else
1547 return;
1548 }
1549 else
1550 return;
1551 }
1552 else
1553 {
1554 HOST_WIDE_INT arg_max_size;
1555
1556 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1557
1558 by_ref = false;
1559 check_ref = false;
1560 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1561 &arg_max_size);
1562 if (arg_max_size == -1
1563 || arg_max_size != arg_size
1564 || arg_offset < 0)
1565 return;
1566
1567 ao_ref_init (&r, arg);
1568 }
1569
1570 /* The second stage walks the BB backwards, looks at individual statements and,
1571 as long as it is confident of how the statements affect contents of the
1572 aggregates, builds a sorted linked list of ipa_known_agg_contents_list
1573 structures describing it. */
1574 gsi = gsi_for_stmt (call);
1575 gsi_prev (&gsi);
1576 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1577 {
1578 struct ipa_known_agg_contents_list *n, **p;
1579 gimple stmt = gsi_stmt (gsi);
1580 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1581 tree lhs, rhs, lhs_base;
1582
1583 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1584 continue;
1585 if (!gimple_assign_single_p (stmt))
1586 break;
1587
1588 lhs = gimple_assign_lhs (stmt);
1589 rhs = gimple_assign_rhs1 (stmt);
1590 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1591 || TREE_CODE (lhs) == BIT_FIELD_REF
1592 || contains_bitfld_component_ref_p (lhs))
1593 break;
1594
1595 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1596 &lhs_max_size);
1597 if (lhs_max_size == -1
1598 || lhs_max_size != lhs_size)
1599 break;
1600
1601 if (check_ref)
1602 {
1603 if (TREE_CODE (lhs_base) != MEM_REF
1604 || TREE_OPERAND (lhs_base, 0) != arg_base
1605 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1606 break;
1607 }
1608 else if (lhs_base != arg_base)
1609 {
1610 if (DECL_P (lhs_base))
1611 continue;
1612 else
1613 break;
1614 }
1615
1616 bool already_there = false;
1617 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1618 &already_there);
1619 if (!p)
1620 break;
1621 if (already_there)
1622 continue;
1623
1624 rhs = get_ssa_def_if_simple_copy (rhs);
1625 n = XALLOCA (struct ipa_known_agg_contents_list);
1626 n->size = lhs_size;
1627 n->offset = lhs_offset;
1628 if (is_gimple_ip_invariant (rhs))
1629 {
1630 n->constant = rhs;
1631 const_count++;
1632 }
1633 else
1634 n->constant = NULL_TREE;
1635 n->next = *p;
1636 *p = n;
1637
1638 item_count++;
1639 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1640 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1641 break;
1642 }
1643
1644 /* The third stage just goes over the list and creates an appropriate vector
1645 of ipa_agg_jf_item structures out of it, of course only if there are
1646 any known constants to begin with. */
1647
1648 if (const_count)
1649 {
1650 jfunc->agg.by_ref = by_ref;
1651 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1652 }
1653 }
1654
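/* Return the type of the Ith formal parameter of the callee of edge E, first
   consulting the declared argument types of the callee (or of the call
   statement's function type) and then falling back on DECL_ARGUMENTS.
   Return NULL if the type cannot be determined. */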
1655 static tree
1656 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1657 {
1658 int n;
1659 tree type = (e->callee
1660 ? TREE_TYPE (e->callee->decl)
1661 : gimple_call_fntype (e->call_stmt));
1662 tree t = TYPE_ARG_TYPES (type);
1663
1664 for (n = 0; n < i; n++)
1665 {
1666 if (!t)
1667 break;
1668 t = TREE_CHAIN (t);
1669 }
1670 if (t)
1671 return TREE_VALUE (t);
1672 if (!e->callee)
1673 return NULL;
1674 t = DECL_ARGUMENTS (e->callee->decl);
1675 for (n = 0; n < i; n++)
1676 {
1677 if (!t)
1678 return NULL;
1679 t = TREE_CHAIN (t);
1680 }
1681 if (t)
1682 return TREE_TYPE (t);
1683 return NULL;
1684 }
1685
1686 /* Compute jump functions for all arguments of callsite CS and insert the
1687 information in the jump_functions array in the ipa_edge_args corresponding
1688 to this callsite. */
1689
1690 static void
1691 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1692 struct cgraph_edge *cs)
1693 {
1694 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1695 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1696 gcall *call = cs->call_stmt;
1697 int n, arg_num = gimple_call_num_args (call);
1698 bool useful_context = false;
1699
1700 if (arg_num == 0 || args->jump_functions)
1701 return;
1702 vec_safe_grow_cleared (args->jump_functions, arg_num);
1703 if (flag_devirtualize)
1704 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1705
1706 if (gimple_call_internal_p (call))
1707 return;
1708 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1709 return;
1710
1711 for (n = 0; n < arg_num; n++)
1712 {
1713 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1714 tree arg = gimple_call_arg (call, n);
1715 tree param_type = ipa_get_callee_param_type (cs, n);
1716 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1717 {
1718 tree instance;
1719 struct ipa_polymorphic_call_context context (cs->caller->decl,
1720 arg, cs->call_stmt,
1721 &instance);
1722 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1723 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1724 if (!context.useless_p ())
1725 useful_context = true;
1726 }
1727
1728 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1729 {
1730 unsigned HOST_WIDE_INT hwi_bitpos;
1731 unsigned align;
1732
1733 if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
1734 && align % BITS_PER_UNIT == 0
1735 && hwi_bitpos % BITS_PER_UNIT == 0)
1736 {
1737 jfunc->alignment.known = true;
1738 jfunc->alignment.align = align / BITS_PER_UNIT;
1739 jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
1740 }
1741 else
1742 gcc_assert (!jfunc->alignment.known);
1743 }
1744 else
1745 gcc_assert (!jfunc->alignment.known);
1746
1747 if (is_gimple_ip_invariant (arg))
1748 ipa_set_jf_constant (jfunc, arg, cs);
1749 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1750 && TREE_CODE (arg) == PARM_DECL)
1751 {
1752 int index = ipa_get_param_decl_index (info, arg);
1753
1754 gcc_assert (index >= 0);
1755 /* Aggregate passed by value, check for pass-through, otherwise we
1756 will attempt to fill in aggregate contents later in this
1757 loop. */
1758 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1759 {
1760 ipa_set_jf_simple_pass_through (jfunc, index, false);
1761 continue;
1762 }
1763 }
1764 else if (TREE_CODE (arg) == SSA_NAME)
1765 {
1766 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1767 {
1768 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1769 if (index >= 0)
1770 {
1771 bool agg_p;
1772 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1773 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1774 }
1775 }
1776 else
1777 {
1778 gimple stmt = SSA_NAME_DEF_STMT (arg);
1779 if (is_gimple_assign (stmt))
1780 compute_complex_assign_jump_func (fbi, info, jfunc,
1781 call, stmt, arg, param_type);
1782 else if (gimple_code (stmt) == GIMPLE_PHI)
1783 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1784 call,
1785 as_a <gphi *> (stmt));
1786 }
1787 }
1788
1789 /* If ARG is a pointer, we cannot use its type to determine the type of the
1790 aggregate passed (because type conversions are ignored in gimple). Usually
1791 we can safely get the type from the function declaration, but for K&R
1792 prototypes or variadic functions we can try our luck with the type of the
1793 pointer passed. TODO: Since we look for the actual initialization of the
1794 memory object, we might do better working out the type from the stores we find. */
1795 if (!param_type)
1796 param_type = TREE_TYPE (arg);
1797
1798 if ((jfunc->type != IPA_JF_PASS_THROUGH
1799 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1800 && (jfunc->type != IPA_JF_ANCESTOR
1801 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1802 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1803 || POINTER_TYPE_P (param_type)))
1804 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1805 }
1806 if (!useful_context)
1807 vec_free (args->polymorphic_call_contexts);
1808 }
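
/* An illustrative sketch (hypothetical code, not from any testcase) of what
   the loop above computes.  For a caller such as

     struct S { int a, b; };
     extern int process (int, int, struct S);

     int wrap (int x, struct S s)
     {
       return process (x, 7, s);
     }

   the first argument is the unmodified parameter X (its SSA default
   definition), yielding a simple pass-through jump function; the literal 7
   is an interprocedural invariant, yielding a constant jump function; and
   the aggregate S either yields a pass-through (when
   parm_preserved_before_stmt_p holds) or whatever constants
   determine_locally_known_aggregate_parts can recover from local stores.  */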
1809
1810 /* Compute jump functions for all edges - both direct and indirect - outgoing
1811 from BB. */
1812
1813 static void
1814 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1815 {
1816 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1817 int i;
1818 struct cgraph_edge *cs;
1819
1820 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1821 {
1822 struct cgraph_node *callee = cs->callee;
1823
1824 if (callee)
1825 {
1826 callee->ultimate_alias_target ();
1827 /* We do not need to bother analyzing calls to unknown functions
1828 unless they may become known during lto/whopr. */
1829 if (!callee->definition && !flag_lto)
1830 continue;
1831 }
1832 ipa_compute_jump_functions_for_edge (fbi, cs);
1833 }
1834 }
1835
1836 /* If STMT looks like a statement loading a value from a member pointer formal
1837 parameter, return that parameter and store the offset of the field to
1838 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1839 might be clobbered). If USE_DELTA, then we look for a use of the delta
1840 field rather than the pfn. */
1841
1842 static tree
1843 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1844 HOST_WIDE_INT *offset_p)
1845 {
1846 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1847
1848 if (!gimple_assign_single_p (stmt))
1849 return NULL_TREE;
1850
1851 rhs = gimple_assign_rhs1 (stmt);
1852 if (TREE_CODE (rhs) == COMPONENT_REF)
1853 {
1854 ref_field = TREE_OPERAND (rhs, 1);
1855 rhs = TREE_OPERAND (rhs, 0);
1856 }
1857 else
1858 ref_field = NULL_TREE;
1859 if (TREE_CODE (rhs) != MEM_REF)
1860 return NULL_TREE;
1861 rec = TREE_OPERAND (rhs, 0);
1862 if (TREE_CODE (rec) != ADDR_EXPR)
1863 return NULL_TREE;
1864 rec = TREE_OPERAND (rec, 0);
1865 if (TREE_CODE (rec) != PARM_DECL
1866 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1867 return NULL_TREE;
1868 ref_offset = TREE_OPERAND (rhs, 1);
1869
1870 if (use_delta)
1871 fld = delta_field;
1872 else
1873 fld = ptr_field;
1874 if (offset_p)
1875 *offset_p = int_bit_position (fld);
1876
1877 if (ref_field)
1878 {
1879 if (integer_nonzerop (ref_offset))
1880 return NULL_TREE;
1881 return ref_field == fld ? rec : NULL_TREE;
1882 }
1883 else
1884 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1885 : NULL_TREE;
1886 }
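
/* For illustration: with the common ptrmemfunc representation, the loads
   matched above come from a structure shaped roughly like

     struct ptrmemfunc
     {
       void (*__pfn) (void);  // function pointer or vtable offset
       ptrdiff_t __delta;     // adjustment of the this pointer
     };

   so a matching statement is either f$__pfn_1 = f.__pfn (a COMPONENT_REF)
   or f$__pfn_1 = MEM[(struct *)&f] (a MEM_REF at a constant offset), with F
   being a PARM_DECL.  The field names above are only a sketch; the real
   layout is whatever type_like_member_ptr_p accepts.  */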
1887
1888 /* Returns true iff T is an SSA_NAME defined by a statement. */
1889
1890 static bool
1891 ipa_is_ssa_with_stmt_def (tree t)
1892 {
1893 if (TREE_CODE (t) == SSA_NAME
1894 && !SSA_NAME_IS_DEFAULT_DEF (t))
1895 return true;
1896 else
1897 return false;
1898 }
1899
1900 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1901 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1902 indirect call graph edge. */
1903
1904 static struct cgraph_edge *
1905 ipa_note_param_call (struct cgraph_node *node, int param_index,
1906 gcall *stmt)
1907 {
1908 struct cgraph_edge *cs;
1909
1910 cs = node->get_edge (stmt);
1911 cs->indirect_info->param_index = param_index;
1912 cs->indirect_info->agg_contents = 0;
1913 cs->indirect_info->member_ptr = 0;
1914 return cs;
1915 }
1916
1917 /* Analyze the CALL and examine uses of formal parameters of the caller
1918 FBI->node (described by FBI->info). Currently it checks
1920 whether the call calls a pointer that is a formal parameter and if so, the
1921 parameter is marked with the called flag and an indirect call graph edge
1922 describing the call is created. This is very simple for ordinary pointers
1923 represented in SSA but not-so-nice when it comes to member pointers. The
1924 ugly part of this function does nothing more than trying to match the
1925 pattern of such a call. An example of such a pattern is the gimple dump
1926 below, the call is on the last line:
1927
1928 <bb 2>:
1929 f$__delta_5 = f.__delta;
1930 f$__pfn_24 = f.__pfn;
1931
1932 or
1933 <bb 2>:
1934 f$__delta_5 = MEM[(struct *)&f];
1935 f$__pfn_24 = MEM[(struct *)&f + 4B];
1936
1937 and a few lines below:
1938
1939 <bb 5>
1940 D.2496_3 = (int) f$__pfn_24;
1941 D.2497_4 = D.2496_3 & 1;
1942 if (D.2497_4 != 0)
1943 goto <bb 3>;
1944 else
1945 goto <bb 4>;
1946
1947 <bb 6>:
1948 D.2500_7 = (unsigned int) f$__delta_5;
1949 D.2501_8 = &S + D.2500_7;
1950 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1951 D.2503_10 = *D.2502_9;
1952 D.2504_12 = f$__pfn_24 + -1;
1953 D.2505_13 = (unsigned int) D.2504_12;
1954 D.2506_14 = D.2503_10 + D.2505_13;
1955 D.2507_15 = *D.2506_14;
1956 iftmp.11_16 = (String:: *) D.2507_15;
1957
1958 <bb 7>:
1959 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1960 D.2500_19 = (unsigned int) f$__delta_5;
1961 D.2508_20 = &S + D.2500_19;
1962 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1963
1964 Such patterns are results of simple calls to a member pointer:
1965
1966 int doprinting (int (MyString::* f)(int) const)
1967 {
1968 MyString S ("somestring");
1969
1970 return (S.*f)(4);
1971 }
1972
1973 Moreover, the function also looks for called pointers loaded from aggregates
1974 passed by value or reference. */
1975
1976 static void
1977 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1978 tree target)
1979 {
1980 struct ipa_node_params *info = fbi->info;
1981 HOST_WIDE_INT offset;
1982 bool by_ref;
1983
1984 if (SSA_NAME_IS_DEFAULT_DEF (target))
1985 {
1986 tree var = SSA_NAME_VAR (target);
1987 int index = ipa_get_param_decl_index (info, var);
1988 if (index >= 0)
1989 ipa_note_param_call (fbi->node, index, call);
1990 return;
1991 }
1992
1993 int index;
1994 gimple def = SSA_NAME_DEF_STMT (target);
1995 if (gimple_assign_single_p (def)
1996 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
1997 gimple_assign_rhs1 (def), &index, &offset,
1998 NULL, &by_ref))
1999 {
2000 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2001 cs->indirect_info->offset = offset;
2002 cs->indirect_info->agg_contents = 1;
2003 cs->indirect_info->by_ref = by_ref;
2004 return;
2005 }
2006
2007 /* Now we need to try to match the complex pattern of calling a member
2008 pointer. */
2009 if (gimple_code (def) != GIMPLE_PHI
2010 || gimple_phi_num_args (def) != 2
2011 || !POINTER_TYPE_P (TREE_TYPE (target))
2012 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2013 return;
2014
2015 /* First, we need to check whether one of these is a load from a member
2016 pointer that is a parameter to this function. */
2017 tree n1 = PHI_ARG_DEF (def, 0);
2018 tree n2 = PHI_ARG_DEF (def, 1);
2019 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2020 return;
2021 gimple d1 = SSA_NAME_DEF_STMT (n1);
2022 gimple d2 = SSA_NAME_DEF_STMT (n2);
2023
2024 tree rec;
2025 basic_block bb, virt_bb;
2026 basic_block join = gimple_bb (def);
2027 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2028 {
2029 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2030 return;
2031
2032 bb = EDGE_PRED (join, 0)->src;
2033 virt_bb = gimple_bb (d2);
2034 }
2035 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2036 {
2037 bb = EDGE_PRED (join, 1)->src;
2038 virt_bb = gimple_bb (d1);
2039 }
2040 else
2041 return;
2042
2043 /* Second, we need to check that the basic blocks are laid out in the way
2044 corresponding to the pattern. */
2045
2046 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2047 || single_pred (virt_bb) != bb
2048 || single_succ (virt_bb) != join)
2049 return;
2050
2051 /* Third, let's see that the branching is done depending on the least
2052 significant bit of the pfn. */
2053
2054 gimple branch = last_stmt (bb);
2055 if (!branch || gimple_code (branch) != GIMPLE_COND)
2056 return;
2057
2058 if ((gimple_cond_code (branch) != NE_EXPR
2059 && gimple_cond_code (branch) != EQ_EXPR)
2060 || !integer_zerop (gimple_cond_rhs (branch)))
2061 return;
2062
2063 tree cond = gimple_cond_lhs (branch);
2064 if (!ipa_is_ssa_with_stmt_def (cond))
2065 return;
2066
2067 def = SSA_NAME_DEF_STMT (cond);
2068 if (!is_gimple_assign (def)
2069 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2070 || !integer_onep (gimple_assign_rhs2 (def)))
2071 return;
2072
2073 cond = gimple_assign_rhs1 (def);
2074 if (!ipa_is_ssa_with_stmt_def (cond))
2075 return;
2076
2077 def = SSA_NAME_DEF_STMT (cond);
2078
2079 if (is_gimple_assign (def)
2080 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2081 {
2082 cond = gimple_assign_rhs1 (def);
2083 if (!ipa_is_ssa_with_stmt_def (cond))
2084 return;
2085 def = SSA_NAME_DEF_STMT (cond);
2086 }
2087
2088 tree rec2;
2089 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2090 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2091 == ptrmemfunc_vbit_in_delta),
2092 NULL);
2093 if (rec != rec2)
2094 return;
2095
2096 index = ipa_get_param_decl_index (info, rec);
2097 if (index >= 0
2098 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2099 {
2100 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2101 cs->indirect_info->offset = offset;
2102 cs->indirect_info->agg_contents = 1;
2103 cs->indirect_info->member_ptr = 1;
2104 }
2105
2106 return;
2107 }
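
/* The two simple cases handled at the top of this function, for
   illustration (hypothetical code, not from a testcase):

     int call_it (int (*f) (int))
     {
       return f (4);        // F is the SSA default def of a parameter
     }

     struct ops { int (*hook) (int); };
     int call_field (struct ops *o)
     {
       return o->hook (4);  // callee loaded from an aggregate passed by ref
     }

   The first creates a plain indirect call note for the parameter; the
   second creates one with agg_contents set, by_ref true and the offset of
   the hook field recorded.  */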
2108
2109 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2110 object referenced in the expression is a formal parameter of the caller
2111 FBI->node (described by FBI->info), create a call note for the
2112 statement. */
2113
2114 static void
2115 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2116 gcall *call, tree target)
2117 {
2118 tree obj = OBJ_TYPE_REF_OBJECT (target);
2119 int index;
2120 HOST_WIDE_INT anc_offset;
2121
2122 if (!flag_devirtualize)
2123 return;
2124
2125 if (TREE_CODE (obj) != SSA_NAME)
2126 return;
2127
2128 struct ipa_node_params *info = fbi->info;
2129 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2130 {
2131 struct ipa_jump_func jfunc;
2132 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2133 return;
2134
2135 anc_offset = 0;
2136 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2137 gcc_assert (index >= 0);
2138 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2139 call, &jfunc))
2140 return;
2141 }
2142 else
2143 {
2144 struct ipa_jump_func jfunc;
2145 gimple stmt = SSA_NAME_DEF_STMT (obj);
2146 tree expr;
2147
2148 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2149 if (!expr)
2150 return;
2151 index = ipa_get_param_decl_index (info,
2152 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2153 gcc_assert (index >= 0);
2154 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2155 call, &jfunc, anc_offset))
2156 return;
2157 }
2158
2159 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2160 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2161 ii->offset = anc_offset;
2162 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2163 ii->otr_type = obj_type_ref_class (target);
2164 ii->polymorphic = 1;
2165 }
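
/* As an illustration (hypothetical C++, not from a testcase), a call such as

     struct Widget { virtual int draw (); };
     int fire (Widget *w)
     {
       return w->draw ();   // becomes an OBJ_TYPE_REF in gimple
     }

   arrives here with OBJ_TYPE_REF_OBJECT being the SSA name of W, a default
   definition of the parameter, so the indirect edge is marked polymorphic
   and the token and type of the OBJ_TYPE_REF are recorded for later
   devirtualization.  */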
2166
2167 /* Analyze a call statement CALL to determine whether and how it utilizes
2168 formal parameters of the caller FBI->node (described by
2169 FBI->info). */
2170
2171 static void
2172 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2173 {
2174 tree target = gimple_call_fn (call);
2175
2176 if (!target
2177 || (TREE_CODE (target) != SSA_NAME
2178 && !virtual_method_call_p (target)))
2179 return;
2180
2181 struct cgraph_edge *cs = fbi->node->get_edge (call);
2182 /* If we previously turned the call into a direct call, there is
2183 no need to analyze. */
2184 if (cs && !cs->indirect_unknown_callee)
2185 return;
2186
2187 if (cs->indirect_info->polymorphic && flag_devirtualize)
2188 {
2189 tree instance;
2190 tree target = gimple_call_fn (call);
2191 ipa_polymorphic_call_context context (current_function_decl,
2192 target, call, &instance);
2193
2194 gcc_checking_assert (cs->indirect_info->otr_type
2195 == obj_type_ref_class (target));
2196 gcc_checking_assert (cs->indirect_info->otr_token
2197 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2198
2199 cs->indirect_info->vptr_changed
2200 = !context.get_dynamic_type (instance,
2201 OBJ_TYPE_REF_OBJECT (target),
2202 obj_type_ref_class (target), call);
2203 cs->indirect_info->context = context;
2204 }
2205
2206 if (TREE_CODE (target) == SSA_NAME)
2207 ipa_analyze_indirect_call_uses (fbi, call, target);
2208 else if (virtual_method_call_p (target))
2209 ipa_analyze_virtual_call_uses (fbi, call, target);
2210 }
2211
2212
2213 /* Analyze the call statement STMT with respect to formal parameters (described
2214 in FBI->info) of the caller given by FBI->node. Currently it only checks
2215 whether formal parameters are called. */
2216
2217 static void
2218 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2219 {
2220 if (is_gimple_call (stmt))
2221 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2222 }
2223
2224 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2225 address operands alike. If OP is a parameter declaration, mark it as
2226 used in the info structure passed in DATA. */
2227
2228 static bool
2229 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2230 {
2231 struct ipa_node_params *info = (struct ipa_node_params *) data;
2232
2233 op = get_base_address (op);
2234 if (op
2235 && TREE_CODE (op) == PARM_DECL)
2236 {
2237 int index = ipa_get_param_decl_index (info, op);
2238 gcc_assert (index >= 0);
2239 ipa_set_param_used (info, index, true);
2240 }
2241
2242 return false;
2243 }
2244
2245 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2246 the findings in various structures of the associated ipa_node_params
2247 structure, such as parameter flags, notes etc. FBI holds various data about
2248 the function being analyzed. */
2249
2250 static void
2251 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2252 {
2253 gimple_stmt_iterator gsi;
2254 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2255 {
2256 gimple stmt = gsi_stmt (gsi);
2257
2258 if (is_gimple_debug (stmt))
2259 continue;
2260
2261 ipa_analyze_stmt_uses (fbi, stmt);
2262 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2263 visit_ref_for_mod_analysis,
2264 visit_ref_for_mod_analysis,
2265 visit_ref_for_mod_analysis);
2266 }
2267 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2268 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2269 visit_ref_for_mod_analysis,
2270 visit_ref_for_mod_analysis,
2271 visit_ref_for_mod_analysis);
2272 }
2273
2274 /* Calculate controlled uses of parameters of NODE. */
2275
2276 static void
2277 ipa_analyze_controlled_uses (struct cgraph_node *node)
2278 {
2279 struct ipa_node_params *info = IPA_NODE_REF (node);
2280
2281 for (int i = 0; i < ipa_get_param_count (info); i++)
2282 {
2283 tree parm = ipa_get_param (info, i);
2284 int controlled_uses = 0;
2285
2286 /* For SSA regs see if parameter is used. For non-SSA we compute
2287 the flag during modification analysis. */
2288 if (is_gimple_reg (parm))
2289 {
2290 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2291 parm);
2292 if (ddef && !has_zero_uses (ddef))
2293 {
2294 imm_use_iterator imm_iter;
2295 use_operand_p use_p;
2296
2297 ipa_set_param_used (info, i, true);
2298 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2299 if (!is_gimple_call (USE_STMT (use_p)))
2300 {
2301 if (!is_gimple_debug (USE_STMT (use_p)))
2302 {
2303 controlled_uses = IPA_UNDESCRIBED_USE;
2304 break;
2305 }
2306 }
2307 else
2308 controlled_uses++;
2309 }
2310 else
2311 controlled_uses = 0;
2312 }
2313 else
2314 controlled_uses = IPA_UNDESCRIBED_USE;
2315 ipa_set_controlled_uses (info, i, controlled_uses);
2316 }
2317 }
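
/* A sketch of the distinction made above (hypothetical code):

     void (*global_cb) (void);

     static void helper (void (*cb) (void))
     {
       cb ();            // use in a call statement: counted as controlled
       global_cb = cb;   // non-call, non-debug use: IPA_UNDESCRIBED_USE
     }

   A parameter whose SSA default definition appears only in call statements
   gets a positive controlled-uses count, one per call; any other real use,
   like the store to GLOBAL_CB, makes the count undescribed.  */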
2318
2319 /* Free stuff in BI. */
2320
2321 static void
2322 free_ipa_bb_info (struct ipa_bb_info *bi)
2323 {
2324 bi->cg_edges.release ();
2325 bi->param_aa_statuses.release ();
2326 }
2327
2328 /* Dominator walker driving the analysis. */
2329
2330 class analysis_dom_walker : public dom_walker
2331 {
2332 public:
2333 analysis_dom_walker (struct func_body_info *fbi)
2334 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2335
2336 virtual void before_dom_children (basic_block);
2337
2338 private:
2339 struct func_body_info *m_fbi;
2340 };
2341
2342 void
2343 analysis_dom_walker::before_dom_children (basic_block bb)
2344 {
2345 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2346 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2347 }
2348
2349 /* Initialize the array describing properties of formal parameters
2350 of NODE, analyze their uses and compute jump functions associated
2351 with actual arguments of calls from within NODE. */
2352
2353 void
2354 ipa_analyze_node (struct cgraph_node *node)
2355 {
2356 struct func_body_info fbi;
2357 struct ipa_node_params *info;
2358
2359 ipa_check_create_node_params ();
2360 ipa_check_create_edge_args ();
2361 info = IPA_NODE_REF (node);
2362
2363 if (info->analysis_done)
2364 return;
2365 info->analysis_done = 1;
2366
2367 if (ipa_func_spec_opts_forbid_analysis_p (node))
2368 {
2369 for (int i = 0; i < ipa_get_param_count (info); i++)
2370 {
2371 ipa_set_param_used (info, i, true);
2372 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2373 }
2374 return;
2375 }
2376
2377 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2378 push_cfun (func);
2379 calculate_dominance_info (CDI_DOMINATORS);
2380 ipa_initialize_node_params (node);
2381 ipa_analyze_controlled_uses (node);
2382
2383 fbi.node = node;
2384 fbi.info = IPA_NODE_REF (node);
2385 fbi.bb_infos = vNULL;
2386 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2387 fbi.param_count = ipa_get_param_count (info);
2388 fbi.aa_walked = 0;
2389
2390 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2391 {
2392 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2393 bi->cg_edges.safe_push (cs);
2394 }
2395
2396 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2397 {
2398 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2399 bi->cg_edges.safe_push (cs);
2400 }
2401
2402 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2403
2404 int i;
2405 struct ipa_bb_info *bi;
2406 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2407 free_ipa_bb_info (bi);
2408 fbi.bb_infos.release ();
2409 free_dominance_info (CDI_DOMINATORS);
2410 pop_cfun ();
2411 }
2412
2413 /* Update the jump functions associated with call graph edge E when the call
2414 graph edge CS is being inlined, assuming that E->caller is already (possibly
2415 indirectly) inlined into CS->callee and that E has not been inlined. */
2416
2417 static void
2418 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2419 struct cgraph_edge *e)
2420 {
2421 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2422 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2423 int count = ipa_get_cs_argument_count (args);
2424 int i;
2425
2426 for (i = 0; i < count; i++)
2427 {
2428 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2429 struct ipa_polymorphic_call_context *dst_ctx
2430 = ipa_get_ith_polymorhic_call_context (args, i);
2431
2432 if (dst->type == IPA_JF_ANCESTOR)
2433 {
2434 struct ipa_jump_func *src;
2435 int dst_fid = dst->value.ancestor.formal_id;
2436 struct ipa_polymorphic_call_context *src_ctx
2437 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2438
2439 /* A variable number of arguments can cause havoc if we try to access
2440 an argument that does not exist on the inlined edge, so make sure
2441 we don't. */
2442 if (dst_fid >= ipa_get_cs_argument_count (top))
2443 {
2444 ipa_set_jf_unknown (dst);
2445 continue;
2446 }
2447
2448 src = ipa_get_ith_jump_func (top, dst_fid);
2449
2450 if (src_ctx && !src_ctx->useless_p ())
2451 {
2452 struct ipa_polymorphic_call_context ctx = *src_ctx;
2453
2454 /* TODO: Make type preserved safe WRT contexts. */
2455 if (!ipa_get_jf_ancestor_type_preserved (dst))
2456 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2457 ctx.offset_by (dst->value.ancestor.offset);
2458 if (!ctx.useless_p ())
2459 {
2460 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2461 count);
2462 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2463 }
2464 dst_ctx->combine_with (ctx);
2465 }
2466
2467 if (src->agg.items
2468 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2469 {
2470 struct ipa_agg_jf_item *item;
2471 int j;
2472
2473 /* Currently we do not produce clobber aggregate jump functions,
2474 replace with merging when we do. */
2475 gcc_assert (!dst->agg.items);
2476
2477 dst->agg.items = vec_safe_copy (src->agg.items);
2478 dst->agg.by_ref = src->agg.by_ref;
2479 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2480 item->offset -= dst->value.ancestor.offset;
2481 }
2482
2483 if (src->type == IPA_JF_PASS_THROUGH
2484 && src->value.pass_through.operation == NOP_EXPR)
2485 {
2486 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2487 dst->value.ancestor.agg_preserved &=
2488 src->value.pass_through.agg_preserved;
2489 }
2490 else if (src->type == IPA_JF_ANCESTOR)
2491 {
2492 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2493 dst->value.ancestor.offset += src->value.ancestor.offset;
2494 dst->value.ancestor.agg_preserved &=
2495 src->value.ancestor.agg_preserved;
2496 }
2497 else
2498 ipa_set_jf_unknown (dst);
2499 }
2500 else if (dst->type == IPA_JF_PASS_THROUGH)
2501 {
2502 struct ipa_jump_func *src;
2503 /* We must check range due to calls with variable number of arguments
2504 and we cannot combine jump functions with operations. */
2505 if (dst->value.pass_through.operation == NOP_EXPR
2506 && (dst->value.pass_through.formal_id
2507 < ipa_get_cs_argument_count (top)))
2508 {
2509 int dst_fid = dst->value.pass_through.formal_id;
2510 src = ipa_get_ith_jump_func (top, dst_fid);
2511 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2512 struct ipa_polymorphic_call_context *src_ctx
2513 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2514
2515 if (src_ctx && !src_ctx->useless_p ())
2516 {
2517 struct ipa_polymorphic_call_context ctx = *src_ctx;
2518
2519 /* TODO: Make type preserved safe WRT contexts. */
2520 if (!ipa_get_jf_pass_through_type_preserved (dst))
2521 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2522 if (!ctx.useless_p ())
2523 {
2524 if (!dst_ctx)
2525 {
2526 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2527 count);
2528 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2529 }
2530 dst_ctx->combine_with (ctx);
2531 }
2532 }
2533 switch (src->type)
2534 {
2535 case IPA_JF_UNKNOWN:
2536 ipa_set_jf_unknown (dst);
2537 break;
2538 case IPA_JF_CONST:
2539 ipa_set_jf_cst_copy (dst, src);
2540 break;
2541
2542 case IPA_JF_PASS_THROUGH:
2543 {
2544 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2545 enum tree_code operation;
2546 operation = ipa_get_jf_pass_through_operation (src);
2547
2548 if (operation == NOP_EXPR)
2549 {
2550 bool agg_p;
2551 agg_p = dst_agg_p
2552 && ipa_get_jf_pass_through_agg_preserved (src);
2553 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2554 }
2555 else
2556 {
2557 tree operand = ipa_get_jf_pass_through_operand (src);
2558 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2559 operation);
2560 }
2561 break;
2562 }
2563 case IPA_JF_ANCESTOR:
2564 {
2565 bool agg_p;
2566 agg_p = dst_agg_p
2567 && ipa_get_jf_ancestor_agg_preserved (src);
2568 ipa_set_ancestor_jf (dst,
2569 ipa_get_jf_ancestor_offset (src),
2570 ipa_get_jf_ancestor_formal_id (src),
2571 agg_p);
2572 break;
2573 }
2574 default:
2575 gcc_unreachable ();
2576 }
2577
2578 if (src->agg.items
2579 && (dst_agg_p || !src->agg.by_ref))
2580 {
2581 /* Currently we do not produce clobber aggregate jump
2582 functions, replace with merging when we do. */
2583 gcc_assert (!dst->agg.items);
2584
2585 dst->agg.by_ref = src->agg.by_ref;
2586 dst->agg.items = vec_safe_copy (src->agg.items);
2587 }
2588 }
2589 else
2590 ipa_set_jf_unknown (dst);
2591 }
2592 }
2593 }
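
/* A small illustration of the composition performed above (hypothetical,
   not from a testcase).  When B is inlined into A in

     void a (int x) { b (x + 1); }   // edge a->b: arithmetic pass-through
     void b (int y) { c (y); }       // edge b->c: simple pass-through

   the surviving edge from the inlined body to C has its simple
   pass-through combined with the arithmetic one from the inlined edge,
   giving an arithmetic pass-through of A's parameter X, so later
   propagation still sees through the inlined call.  Combinations that
   cannot be expressed are conservatively degraded to IPA_JF_UNKNOWN.  */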
2594
2595 /* If TARGET is an addr_expr of a function declaration, make it the
2596 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2597 Otherwise, return NULL. */
2598
2599 struct cgraph_edge *
2600 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2601 bool speculative)
2602 {
2603 struct cgraph_node *callee;
2604 struct inline_edge_summary *es = inline_edge_summary (ie);
2605 bool unreachable = false;
2606
2607 if (TREE_CODE (target) == ADDR_EXPR)
2608 target = TREE_OPERAND (target, 0);
2609 if (TREE_CODE (target) != FUNCTION_DECL)
2610 {
2611 target = canonicalize_constructor_val (target, NULL);
2612 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2613 {
2614 /* Member pointer call that goes through a VMT lookup. */
2615 if (ie->indirect_info->member_ptr
2616 /* Or if target is not an invariant expression and we do not
2617 know whether it will evaluate to a function at runtime.
2618 This can happen when folding through &VAR, where &VAR
2619 is IP invariant, but VAR itself is not.
2620
2621 TODO: Revisit this when GCC 5 is branched. It seems that
2622 member_ptr check is not needed and that we may try to fold
2623 the expression and see if VAR is readonly. */
2624 || !is_gimple_ip_invariant (target))
2625 {
2626 if (dump_enabled_p ())
2627 {
2628 location_t loc = gimple_location_safe (ie->call_stmt);
2629 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2630 "discovered direct call non-invariant "
2631 "%s/%i\n",
2632 ie->caller->name (), ie->caller->order);
2633 }
2634 return NULL;
2635 }
2636
2637
2638 if (dump_enabled_p ())
2639 {
2640 location_t loc = gimple_location_safe (ie->call_stmt);
2641 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2642 "discovered direct call to non-function in %s/%i, "
2643 "making it __builtin_unreachable\n",
2644 ie->caller->name (), ie->caller->order);
2645 }
2646
2647 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2648 callee = cgraph_node::get_create (target);
2649 unreachable = true;
2650 }
2651 else
2652 callee = cgraph_node::get (target);
2653 }
2654 else
2655 callee = cgraph_node::get (target);
2656
2657 /* Because may-edges are not explicitly represented and the vtable may be
2658 external, we may create the first reference to the object in the unit. */
2659 if (!callee || callee->global.inlined_to)
2660 {
2661
2662 /* We had better ensure we can refer to it.
2663 In the case of static functions we are out of luck, since we already
2664 removed its body. In the case of public functions we may or may
2665 not introduce the reference. */
2666 if (!canonicalize_constructor_val (target, NULL)
2667 || !TREE_PUBLIC (target))
2668 {
2669 if (dump_file)
2670 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2671 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2672 xstrdup_for_dump (ie->caller->name ()),
2673 ie->caller->order,
2674 xstrdup_for_dump (ie->callee->name ()),
2675 ie->callee->order);
2676 return NULL;
2677 }
2678 callee = cgraph_node::get_create (target);
2679 }
2680
2681 /* If the edge is already speculative, check that the speculation agrees. */
2682 if (speculative && ie->speculative)
2683 {
2684 struct cgraph_edge *e2;
2685 struct ipa_ref *ref;
2686 ie->speculative_call_info (e2, ie, ref);
2687 if (e2->callee->ultimate_alias_target ()
2688 != callee->ultimate_alias_target ())
2689 {
2690 if (dump_file)
2691 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2692 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2693 xstrdup_for_dump (ie->caller->name ()),
2694 ie->caller->order,
2695 xstrdup_for_dump (callee->name ()),
2696 callee->order,
2697 xstrdup_for_dump (e2->callee->name ()),
2698 e2->callee->order);
2699 }
2700 else
2701 {
2702 if (dump_file)
2703 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2704 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2705 xstrdup_for_dump (ie->caller->name ()),
2706 ie->caller->order,
2707 xstrdup_for_dump (callee->name ()),
2708 callee->order);
2709 }
2710 return NULL;
2711 }
2712
2713 if (!dbg_cnt (devirt))
2714 return NULL;
2715
2716 ipa_check_create_node_params ();
2717
2718 /* We cannot make edges to inline clones. It is a bug if someone removed
2719 the cgraph node too early. */
2720 gcc_assert (!callee->global.inlined_to);
2721
2722 if (dump_file && !unreachable)
2723 {
2724 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2725 "(%s/%i -> %s/%i), for stmt ",
2726 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2727 speculative ? "speculative" : "known",
2728 xstrdup_for_dump (ie->caller->name ()),
2729 ie->caller->order,
2730 xstrdup_for_dump (callee->name ()),
2731 callee->order);
2732 if (ie->call_stmt)
2733 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2734 else
2735 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2736 }
2737 if (dump_enabled_p ())
2738 {
2739 location_t loc = gimple_location_safe (ie->call_stmt);
2740
2741 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2742 "converting indirect call in %s to direct call to %s\n",
2743 ie->caller->name (), callee->name ());
2744 }
2745 if (!speculative)
2746 {
2747 struct cgraph_edge *orig = ie;
2748 ie = ie->make_direct (callee);
2749 /* If we resolved a speculative edge, the cost is already up to date
2750 for the direct call (adjusted by inline_edge_duplication_hook). */
2751 if (ie == orig)
2752 {
2753 es = inline_edge_summary (ie);
2754 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2755 - eni_size_weights.call_cost);
2756 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2757 - eni_time_weights.call_cost);
2758 }
2759 }
2760 else
2761 {
2762 if (!callee->can_be_discarded_p ())
2763 {
2764 cgraph_node *alias;
2765 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2766 if (alias)
2767 callee = alias;
2768 }
2769 /* make_speculative will update ie's cost to direct call cost. */
2770 ie = ie->make_speculative
2771 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2772 }
2773
2774 return ie;
2775 }
2776
2777 /* Retrieve a value from aggregate jump function AGG at the given OFFSET or
2778 return NULL if there is none. BY_REF specifies whether the value has to
2779 be passed by reference or by value. */
2780
2781 tree
2782 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2783 HOST_WIDE_INT offset, bool by_ref)
2784 {
2785 struct ipa_agg_jf_item *item;
2786 int i;
2787
2788 if (by_ref != agg->by_ref)
2789 return NULL;
2790
2791 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2792 if (item->offset == offset)
2793 {
2794 /* Currently we do not have clobber values, return NULL for them once
2795 we do. */
2796 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2797 return item->value;
2798 }
2799 return NULL;
2800 }
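
/* Usage sketch (hypothetical): if a caller passes &ops where the field
   ops.hook has been locally initialized to &foo, the aggregate jump
   function carries an item with the bit offset of hook and the value &foo,
   and an indirect edge with agg_contents set and matching offset and
   by_ref flag is resolved by this lookup to the FUNCTION_DECL of foo.  */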
2801
2802 /* Remove a reference to SYMBOL from the list of references of a node given by
2803 reference description RDESC. Return true if the reference has been
2804 successfully found and removed. */
2805
2806 static bool
2807 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2808 {
2809 struct ipa_ref *to_del;
2810 struct cgraph_edge *origin;
2811
2812 origin = rdesc->cs;
2813 if (!origin)
2814 return false;
2815 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2816 origin->lto_stmt_uid);
2817 if (!to_del)
2818 return false;
2819
2820 to_del->remove_reference ();
2821 if (dump_file)
2822 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2823 xstrdup_for_dump (origin->caller->name ()),
2824 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2825 return true;
2826 }
2827
2828 /* If JFUNC has a reference description with refcount different from
2829 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2830 NULL. JFUNC must be a constant jump function. */
2831
2832 static struct ipa_cst_ref_desc *
2833 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2834 {
2835 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2836 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2837 return rdesc;
2838 else
2839 return NULL;
2840 }
2841
2842 /* If the value of constant jump function JFUNC is an address of a function
2843 declaration, return the associated call graph node. Otherwise return
2844 NULL. */
2845
2846 static cgraph_node *
2847 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2848 {
2849 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2850 tree cst = ipa_get_jf_constant (jfunc);
2851 if (TREE_CODE (cst) != ADDR_EXPR
2852 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2853 return NULL;
2854
2855 return cgraph_node::get (TREE_OPERAND (cst, 0));
2856 }
2857
2858
2859 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2860 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2861 the edge specified in the rdesc. Return false if either the symbol or the
2862 reference could not be found, otherwise return true. */
2863
2864 static bool
2865 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2866 {
2867 struct ipa_cst_ref_desc *rdesc;
2868 if (jfunc->type == IPA_JF_CONST
2869 && (rdesc = jfunc_rdesc_usable (jfunc))
2870 && --rdesc->refcount == 0)
2871 {
2872 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2873 if (!symbol)
2874 return false;
2875
2876 return remove_described_reference (symbol, rdesc);
2877 }
2878 return true;
2879 }
2880
2881 /* Try to find a destination for indirect edge IE that corresponds to a simple
2882 call or a call of a member function pointer and where the destination is a
2883 pointer formal parameter described by jump function JFUNC. If it can be
2884 determined, return the newly direct edge, otherwise return NULL.
2885 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2886
2887 static struct cgraph_edge *
2888 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2889 struct ipa_jump_func *jfunc,
2890 struct ipa_node_params *new_root_info)
2891 {
2892 struct cgraph_edge *cs;
2893 tree target;
2894 bool agg_contents = ie->indirect_info->agg_contents;
2895
2896 if (ie->indirect_info->agg_contents)
2897 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2898 ie->indirect_info->offset,
2899 ie->indirect_info->by_ref);
2900 else
2901 target = ipa_value_from_jfunc (new_root_info, jfunc);
2902 if (!target)
2903 return NULL;
2904 cs = ipa_make_edge_direct_to_target (ie, target);
2905
2906 if (cs && !agg_contents)
2907 {
2908 bool ok;
2909 gcc_checking_assert (cs->callee
2910 && (cs != ie
2911 || jfunc->type != IPA_JF_CONST
2912 || !cgraph_node_for_jfunc (jfunc)
2913 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2914 ok = try_decrement_rdesc_refcount (jfunc);
2915 gcc_checking_assert (ok);
2916 }
2917
2918 return cs;
2919 }
2920
2921 /* Return the target to be used in cases of impossible devirtualization. IE
2922 and target (the latter can be NULL) are dumped when dumping is enabled. */
2923
2924 tree
2925 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2926 {
2927 if (dump_file)
2928 {
2929 if (target)
2930 fprintf (dump_file,
2931 "Type inconsistent devirtualization: %s/%i->%s\n",
2932 ie->caller->name (), ie->caller->order,
2933 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2934 else
2935 fprintf (dump_file,
2936 "No devirtualization target in %s/%i\n",
2937 ie->caller->name (), ie->caller->order);
2938 }
2939 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2940 cgraph_node::get_create (new_target);
2941 return new_target;
2942 }
2943
2944 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2945 call based on a formal parameter which is described by jump function JFUNC
2946 and if it can be determined, make it direct and return the direct edge.
2947 Otherwise, return NULL. CTX describes the polymorphic context carried
2948 by the parameter on which the call is based. */
2949
2950 static struct cgraph_edge *
2951 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2952 struct ipa_jump_func *jfunc,
2953 struct ipa_polymorphic_call_context ctx)
2954 {
2955 tree target = NULL;
2956 bool speculative = false;
2957
2958 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2959 return NULL;
2960
2961 gcc_assert (!ie->indirect_info->by_ref);
2962
2963 /* Try to do lookup via known virtual table pointer value. */
2964 if (!ie->indirect_info->vptr_changed
2965 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2966 {
2967 tree vtable;
2968 unsigned HOST_WIDE_INT offset;
2969 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2970 ie->indirect_info->offset,
2971 true);
2972 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2973 {
2974 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2975 vtable, offset);
2976 if (t)
2977 {
2978 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2979 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2980 || !possible_polymorphic_call_target_p
2981 (ie, cgraph_node::get (t)))
2982 {
2983 /* Do not speculate builtin_unreachable, it is stupid! */
2984 if (!ie->indirect_info->vptr_changed)
2985 target = ipa_impossible_devirt_target (ie, target);
2986 }
2987 else
2988 {
2989 target = t;
2990 speculative = ie->indirect_info->vptr_changed;
2991 }
2992 }
2993 }
2994 }
2995
2996 ipa_polymorphic_call_context ie_context (ie);
2997 vec <cgraph_node *>targets;
2998 bool final;
2999
3000 ctx.offset_by (ie->indirect_info->offset);
3001 if (ie->indirect_info->vptr_changed)
3002 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3003 ie->indirect_info->otr_type);
3004 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3005 targets = possible_polymorphic_call_targets
3006 (ie->indirect_info->otr_type,
3007 ie->indirect_info->otr_token,
3008 ctx, &final);
3009 if (final && targets.length () <= 1)
3010 {
3011 speculative = false;
3012 if (targets.length () == 1)
3013 target = targets[0]->decl;
3014 else
3015 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3016 }
3017 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3018 && !ie->speculative && ie->maybe_hot_p ())
3019 {
3020 cgraph_node *n;
3021 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3022 ie->indirect_info->otr_token,
3023 ie->indirect_info->context);
3024 if (n)
3025 {
3026 target = n->decl;
3027 speculative = true;
3028 }
3029 }
3030
3031 if (target)
3032 {
3033 if (!possible_polymorphic_call_target_p
3034 (ie, cgraph_node::get_create (target)))
3035 {
3036 if (speculative)
3037 return NULL;
3038 target = ipa_impossible_devirt_target (ie, target);
3039 }
3040 return ipa_make_edge_direct_to_target (ie, target, speculative);
3041 }
3042 else
3043 return NULL;
3044 }
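
/* For illustration (hypothetical C++, not from a testcase):

     struct Base { virtual int f () { return 0; } };
     struct Final final : Base { int f () { return 1; } };

     int use (Final *p) { return p->f (); }

   Here the context analysis can prove the list of possible targets is
   complete and contains only Final::f, so the edge is made direct.  When
   the target list is not final, the path guarded by
   flag_devirtualize_speculatively may still pick the single likely target
   and attach it as a speculative edge.  */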
3045
3046 /* Update the param called notes associated with NODE when CS is being inlined,
3047 assuming NODE is (potentially indirectly) inlined into CS->callee.
3048 Moreover, if the callee is discovered to be constant, create a new cgraph
3049 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3050 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3051
3052 static bool
3053 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3054 struct cgraph_node *node,
3055 vec<cgraph_edge *> *new_edges)
3056 {
3057 struct ipa_edge_args *top;
3058 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3059 struct ipa_node_params *new_root_info;
3060 bool res = false;
3061
3062 ipa_check_create_edge_args ();
3063 top = IPA_EDGE_REF (cs);
3064 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3065 ? cs->caller->global.inlined_to
3066 : cs->caller);
3067
3068 for (ie = node->indirect_calls; ie; ie = next_ie)
3069 {
3070 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3071 struct ipa_jump_func *jfunc;
3072 int param_index;
3073 cgraph_node *spec_target = NULL;
3074
3075 next_ie = ie->next_callee;
3076
3077 if (ici->param_index == -1)
3078 continue;
3079
3080 /* We must check range due to calls with variable number of arguments: */
3081 if (ici->param_index >= ipa_get_cs_argument_count (top))
3082 {
3083 ici->param_index = -1;
3084 continue;
3085 }
3086
3087 param_index = ici->param_index;
3088 jfunc = ipa_get_ith_jump_func (top, param_index);
3089
3090 if (ie->speculative)
3091 {
3092 struct cgraph_edge *de;
3093 struct ipa_ref *ref;
3094 ie->speculative_call_info (de, ie, ref);
3095 spec_target = de->callee;
3096 }
3097
3098 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3099 new_direct_edge = NULL;
3100 else if (ici->polymorphic)
3101 {
3102 ipa_polymorphic_call_context ctx;
3103 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3104 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3105 }
3106 else
3107 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3108 new_root_info);
3109 /* If speculation was removed, then we need to do nothing. */
3110 if (new_direct_edge && new_direct_edge != ie
3111 && new_direct_edge->callee == spec_target)
3112 {
3113 new_direct_edge->indirect_inlining_edge = 1;
3114 top = IPA_EDGE_REF (cs);
3115 res = true;
3116 if (!new_direct_edge->speculative)
3117 continue;
3118 }
3119 else if (new_direct_edge)
3120 {
3121 new_direct_edge->indirect_inlining_edge = 1;
3122 if (new_direct_edge->call_stmt)
3123 new_direct_edge->call_stmt_cannot_inline_p
3124 = !gimple_check_call_matching_types (
3125 new_direct_edge->call_stmt,
3126 new_direct_edge->callee->decl, false);
3127 if (new_edges)
3128 {
3129 new_edges->safe_push (new_direct_edge);
3130 res = true;
3131 }
3132 top = IPA_EDGE_REF (cs);
3133 /* If a speculative edge was introduced, we still need to update
3134 the call info of the indirect edge. */
3135 if (!new_direct_edge->speculative)
3136 continue;
3137 }
3138 if (jfunc->type == IPA_JF_PASS_THROUGH
3139 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3140 {
3141 if (ici->agg_contents
3142 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3143 && !ici->polymorphic)
3144 ici->param_index = -1;
3145 else
3146 {
3147 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3148 if (ici->polymorphic
3149 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3150 ici->vptr_changed = true;
3151 }
3152 }
3153 else if (jfunc->type == IPA_JF_ANCESTOR)
3154 {
3155 if (ici->agg_contents
3156 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3157 && !ici->polymorphic)
3158 ici->param_index = -1;
3159 else
3160 {
3161 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3162 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3163 if (ici->polymorphic
3164 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3165 ici->vptr_changed = true;
3166 }
3167 }
3168 else
3169 /* Either we can find a destination for this edge now or never. */
3170 ici->param_index = -1;
3171 }
3172
3173 return res;
3174 }
3175
3176 /* Recursively traverse subtree of NODE (including node) made of inlined
3177 cgraph_edges when CS has been inlined and invoke
3178 update_indirect_edges_after_inlining on all nodes and
3179 update_jump_functions_after_inlining on all non-inlined edges that lead out
3180 of this subtree. Newly discovered indirect edges will be added to
3181 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3182 created. */
3183
3184 static bool
3185 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3186 struct cgraph_node *node,
3187 vec<cgraph_edge *> *new_edges)
3188 {
3189 struct cgraph_edge *e;
3190 bool res;
3191
3192 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3193
3194 for (e = node->callees; e; e = e->next_callee)
3195 if (!e->inline_failed)
3196 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3197 else
3198 update_jump_functions_after_inlining (cs, e);
3199 for (e = node->indirect_calls; e; e = e->next_callee)
3200 update_jump_functions_after_inlining (cs, e);
3201
3202 return res;
3203 }
3204
3205 /* Combine two controlled use counts as done during inlining. */
3206
3207 static int
3208 combine_controlled_uses_counters (int c, int d)
3209 {
3210 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3211 return IPA_UNDESCRIBED_USE;
3212 else
3213 return c + d - 1;
3214 }
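
/* Why C + D - 1: one of the caller's C described uses is the call site
   being inlined; after inlining that use disappears, while the callee's D
   uses of the corresponding value become uses in the caller.  For example,
   C = 2 and D = 3 combine to 4 remaining described uses.  */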
3215
3216 /* Propagate the number of controlled users from CS->callee to the new root of the
3217 tree of inlined nodes. */
3218
3219 static void
3220 propagate_controlled_uses (struct cgraph_edge *cs)
3221 {
3222 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3223 struct cgraph_node *new_root = cs->caller->global.inlined_to
3224 ? cs->caller->global.inlined_to : cs->caller;
3225 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3226 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3227 int count, i;
3228
3229 count = MIN (ipa_get_cs_argument_count (args),
3230 ipa_get_param_count (old_root_info));
3231 for (i = 0; i < count; i++)
3232 {
3233 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3234 struct ipa_cst_ref_desc *rdesc;
3235
3236 if (jf->type == IPA_JF_PASS_THROUGH)
3237 {
3238 int src_idx, c, d;
3239 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3240 c = ipa_get_controlled_uses (new_root_info, src_idx);
3241 d = ipa_get_controlled_uses (old_root_info, i);
3242
3243 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3244 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3245 c = combine_controlled_uses_counters (c, d);
3246 ipa_set_controlled_uses (new_root_info, src_idx, c);
3247 if (c == 0 && new_root_info->ipcp_orig_node)
3248 {
3249 struct cgraph_node *n;
3250 struct ipa_ref *ref;
3251 tree t = new_root_info->known_csts[src_idx];
3252
3253 if (t && TREE_CODE (t) == ADDR_EXPR
3254 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3255 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3256 && (ref = new_root->find_reference (n, NULL, 0)))
3257 {
3258 if (dump_file)
3259 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3260 "reference from %s/%i to %s/%i.\n",
3261 xstrdup_for_dump (new_root->name ()),
3262 new_root->order,
3263 xstrdup_for_dump (n->name ()), n->order);
3264 ref->remove_reference ();
3265 }
3266 }
3267 }
3268 else if (jf->type == IPA_JF_CONST
3269 && (rdesc = jfunc_rdesc_usable (jf)))
3270 {
3271 int d = ipa_get_controlled_uses (old_root_info, i);
3272 int c = rdesc->refcount;
3273 rdesc->refcount = combine_controlled_uses_counters (c, d);
3274 if (rdesc->refcount == 0)
3275 {
3276 tree cst = ipa_get_jf_constant (jf);
3277 struct cgraph_node *n;
3278 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3279 && TREE_CODE (TREE_OPERAND (cst, 0))
3280 == FUNCTION_DECL);
3281 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3282 if (n)
3283 {
3284 struct cgraph_node *clone;
3285 bool ok;
3286 ok = remove_described_reference (n, rdesc);
3287 gcc_checking_assert (ok);
3288
3289 clone = cs->caller;
3290 while (clone->global.inlined_to
3291 && clone != rdesc->cs->caller
3292 && IPA_NODE_REF (clone)->ipcp_orig_node)
3293 {
3294 struct ipa_ref *ref;
3295 ref = clone->find_reference (n, NULL, 0);
3296 if (ref)
3297 {
3298 if (dump_file)
3299 fprintf (dump_file, "ipa-prop: Removing "
3300 "cloning-created reference "
3301 "from %s/%i to %s/%i.\n",
3302 xstrdup_for_dump (clone->name ()),
3303 clone->order,
3304 xstrdup_for_dump (n->name ()),
3305 n->order);
3306 ref->remove_reference ();
3307 }
3308 clone = clone->callers->caller;
3309 }
3310 }
3311 }
3312 }
3313 }
3314
3315 for (i = ipa_get_param_count (old_root_info);
3316 i < ipa_get_cs_argument_count (args);
3317 i++)
3318 {
3319 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3320
3321 if (jf->type == IPA_JF_CONST)
3322 {
3323 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3324 if (rdesc)
3325 rdesc->refcount = IPA_UNDESCRIBED_USE;
3326 }
3327 else if (jf->type == IPA_JF_PASS_THROUGH)
3328 ipa_set_controlled_uses (new_root_info,
3329 jf->value.pass_through.formal_id,
3330 IPA_UNDESCRIBED_USE);
3331 }
3332 }
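
/* The point of the refcount bookkeeping above, as a hypothetical sketch:
   given

     static int foo (int i) { return i; }
     extern int apply (int (*) (int), int);
     int bar (void) { return apply (foo, 3); }

   the ADDR_EXPR &foo passed to APPLY has a reference description.  If
   inlining APPLY accounts for all described uses and the refcount drops to
   zero, the now-redundant IPA reference from BAR to FOO is removed, which
   may allow FOO itself to be eliminated.  */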
3333
3334 /* Update jump functions and call note functions on inlining the call site CS.
3335 CS is expected to lead to a node already cloned by
3336 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3337 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3338 created. */
3339
3340 bool
3341 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3342 vec<cgraph_edge *> *new_edges)
3343 {
3344 bool changed;
3345 /* Do nothing if the preparation phase has not been carried out yet
3346 (i.e. during early inlining). */
3347 if (!ipa_node_params_sum)
3348 return false;
3349 gcc_assert (ipa_edge_args_vector);
3350
3351 propagate_controlled_uses (cs);
3352 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3353
3354 return changed;
3355 }
3356
3357 /* Frees all dynamically allocated structures that the argument info points
3358 to. */
3359
3360 void
3361 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3362 {
3363 vec_free (args->jump_functions);
3364 memset (args, 0, sizeof (*args));
3365 }
3366
3367 /* Free all ipa_edge_args structures. */
3368
3369 void
3370 ipa_free_all_edge_args (void)
3371 {
3372 int i;
3373 struct ipa_edge_args *args;
3374
3375 if (!ipa_edge_args_vector)
3376 return;
3377
3378 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3379 ipa_free_edge_args_substructures (args);
3380
3381 vec_free (ipa_edge_args_vector);
3382 }
3383
3384 /* Frees all dynamically allocated structures that the param info points
3385 to. */
3386
3387 ipa_node_params::~ipa_node_params ()
3388 {
3389 descriptors.release ();
3390 free (lattices);
3391 /* Lattice values and their sources are deallocated with their allocation
3392 pool. */
3393 known_contexts.release ();
3394
3395 lattices = NULL;
3396 ipcp_orig_node = NULL;
3397 analysis_done = 0;
3398 node_enqueued = 0;
3399 do_clone_for_all_contexts = 0;
3400 is_all_contexts_clone = 0;
3401 node_dead = 0;
3402 }
3403
3404 /* Free all ipa_node_params structures. */
3405
3406 void
3407 ipa_free_all_node_params (void)
3408 {
3409 delete ipa_node_params_sum;
3410 ipa_node_params_sum = NULL;
3411 }
3412
3413 /* Grow ipcp_transformations if necessary. */
3414
3415 void
3416 ipcp_grow_transformations_if_necessary (void)
3417 {
3418 if (vec_safe_length (ipcp_transformations)
3419 <= (unsigned) symtab->cgraph_max_uid)
3420 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3421 }
3422
3423 /* Set the aggregate replacements of NODE to be AGGVALS. */
3424
3425 void
3426 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3427 struct ipa_agg_replacement_value *aggvals)
3428 {
3429 ipcp_grow_transformations_if_necessary ();
3430 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3431 }
3432
3433 /* Hook that is called by cgraph.c when an edge is removed. */
3434
3435 static void
3436 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3437 {
3438 struct ipa_edge_args *args;
3439
3440 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3441 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3442 return;
3443
3444 args = IPA_EDGE_REF (cs);
3445 if (args->jump_functions)
3446 {
3447 struct ipa_jump_func *jf;
3448 int i;
3449 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3450 {
3451 struct ipa_cst_ref_desc *rdesc;
3452 try_decrement_rdesc_refcount (jf);
3453 if (jf->type == IPA_JF_CONST
3454 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3455 && rdesc->cs == cs)
3456 rdesc->cs = NULL;
3457 }
3458 }
3459
3460 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3461 }
3462
3463 /* Hook that is called by cgraph.c when an edge is duplicated. */
3464
3465 static void
3466 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3467 void *)
3468 {
3469 struct ipa_edge_args *old_args, *new_args;
3470 unsigned int i;
3471
3472 ipa_check_create_edge_args ();
3473
3474 old_args = IPA_EDGE_REF (src);
3475 new_args = IPA_EDGE_REF (dst);
3476
3477 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3478 if (old_args->polymorphic_call_contexts)
3479 new_args->polymorphic_call_contexts
3480 = vec_safe_copy (old_args->polymorphic_call_contexts);
3481
3482 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3483 {
3484 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3485 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3486
3487 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3488
3489 if (src_jf->type == IPA_JF_CONST)
3490 {
3491 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3492
3493 if (!src_rdesc)
3494 dst_jf->value.constant.rdesc = NULL;
3495 else if (src->caller == dst->caller)
3496 {
3497 struct ipa_ref *ref;
3498 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3499 gcc_checking_assert (n);
3500 ref = src->caller->find_reference (n, src->call_stmt,
3501 src->lto_stmt_uid);
3502 gcc_checking_assert (ref);
3503 dst->caller->clone_reference (ref, ref->stmt);
3504
3505 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3506 dst_rdesc->cs = dst;
3507 dst_rdesc->refcount = src_rdesc->refcount;
3508 dst_rdesc->next_duplicate = NULL;
3509 dst_jf->value.constant.rdesc = dst_rdesc;
3510 }
3511 else if (src_rdesc->cs == src)
3512 {
3513 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3514 dst_rdesc->cs = dst;
3515 dst_rdesc->refcount = src_rdesc->refcount;
3516 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3517 src_rdesc->next_duplicate = dst_rdesc;
3518 dst_jf->value.constant.rdesc = dst_rdesc;
3519 }
3520 else
3521 {
3522 struct ipa_cst_ref_desc *dst_rdesc;
3523 /* This can happen during inlining, when a JFUNC can refer to a
3524 reference taken in a function up in the tree of inline clones.
3525 We need to find the duplicate that refers to our tree of
3526 inline clones. */
3527
3528 gcc_assert (dst->caller->global.inlined_to);
3529 for (dst_rdesc = src_rdesc->next_duplicate;
3530 dst_rdesc;
3531 dst_rdesc = dst_rdesc->next_duplicate)
3532 {
3533 struct cgraph_node *top;
3534 top = dst_rdesc->cs->caller->global.inlined_to
3535 ? dst_rdesc->cs->caller->global.inlined_to
3536 : dst_rdesc->cs->caller;
3537 if (dst->caller->global.inlined_to == top)
3538 break;
3539 }
3540 gcc_assert (dst_rdesc);
3541 dst_jf->value.constant.rdesc = dst_rdesc;
3542 }
3543 }
3544 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3545 && src->caller == dst->caller)
3546 {
3547 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3548 ? dst->caller->global.inlined_to : dst->caller;
3549 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3550 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3551
3552 int c = ipa_get_controlled_uses (root_info, idx);
3553 if (c != IPA_UNDESCRIBED_USE)
3554 {
3555 c++;
3556 ipa_set_controlled_uses (root_info, idx, c);
3557 }
3558 }
3559 }
3560 }
3561
3562 /* Analyze a function newly added to the callgraph. */
3563
3564 static void
3565 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3566 {
3567 if (node->has_gimple_body_p ())
3568 ipa_analyze_node (node);
3569 }
3570
3571 /* Hook that is called by summary when a node is duplicated. */
3572
3573 void
3574 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3575 ipa_node_params *old_info,
3576 ipa_node_params *new_info)
3577 {
3578 ipa_agg_replacement_value *old_av, *new_av;
3579
3580 new_info->descriptors = old_info->descriptors.copy ();
3581 new_info->lattices = NULL;
3582 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3583
3584 new_info->analysis_done = old_info->analysis_done;
3585 new_info->node_enqueued = old_info->node_enqueued;
3586
3587 old_av = ipa_get_agg_replacements_for_node (src);
3588 if (old_av)
3589 {
3590 new_av = NULL;
3591 while (old_av)
3592 {
3593 struct ipa_agg_replacement_value *v;
3594
3595 v = ggc_alloc<ipa_agg_replacement_value> ();
3596 memcpy (v, old_av, sizeof (*v));
3597 v->next = new_av;
3598 new_av = v;
3599 old_av = old_av->next;
3600 }
3601 ipa_set_node_agg_value_chain (dst, new_av);
3602 }
3603
3604 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3605
3606 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3607 {
3608 ipcp_grow_transformations_if_necessary ();
3609 src_trans = ipcp_get_transformation_summary (src);
3610 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3611 vec<ipa_alignment, va_gc> *&dst_alignments
3612 = ipcp_get_transformation_summary (dst)->alignments;
3613 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3614 for (unsigned i = 0; i < src_alignments->length (); ++i)
3615 dst_alignments->quick_push ((*src_alignments)[i]);
3616 }
3617 }
3618
3619 /* Register our cgraph hooks if they are not already there. */
3620
3621 void
3622 ipa_register_cgraph_hooks (void)
3623 {
3624 ipa_check_create_node_params ();
3625
3626 if (!edge_removal_hook_holder)
3627 edge_removal_hook_holder =
3628 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3629 if (!edge_duplication_hook_holder)
3630 edge_duplication_hook_holder =
3631 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3632 function_insertion_hook_holder =
3633 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3634 }
3635
3636 /* Unregister our cgraph hooks. */
3637
3638 static void
3639 ipa_unregister_cgraph_hooks (void)
3640 {
3641 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3642 edge_removal_hook_holder = NULL;
3643 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3644 edge_duplication_hook_holder = NULL;
3645 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3646 function_insertion_hook_holder = NULL;
3647 }
3648
3649 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3650 longer needed after ipa-cp. */
3651
3652 void
3653 ipa_free_all_structures_after_ipa_cp (void)
3654 {
3655 if (!optimize && !in_lto_p)
3656 {
3657 ipa_free_all_edge_args ();
3658 ipa_free_all_node_params ();
3659 ipcp_sources_pool.release ();
3660 ipcp_cst_values_pool.release ();
3661 ipcp_poly_ctx_values_pool.release ();
3662 ipcp_agg_lattice_pool.release ();
3663 ipa_unregister_cgraph_hooks ();
3664 ipa_refdesc_pool.release ();
3665 }
3666 }
3667
3668 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3669 longer needed after indirect inlining. */
3670
3671 void
3672 ipa_free_all_structures_after_iinln (void)
3673 {
3674 ipa_free_all_edge_args ();
3675 ipa_free_all_node_params ();
3676 ipa_unregister_cgraph_hooks ();
3677 ipcp_sources_pool.release ();
3678 ipcp_cst_values_pool.release ();
3679 ipcp_poly_ctx_values_pool.release ();
3680 ipcp_agg_lattice_pool.release ();
3681 ipa_refdesc_pool.release ();
3682 }
3683
3684 /* Print ipa_tree_map data structures (parameter descriptors) of
3685 function NODE to F. */
3686
3687 void
3688 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3689 {
3690 int i, count;
3691 struct ipa_node_params *info;
3692
3693 if (!node->definition)
3694 return;
3695 info = IPA_NODE_REF (node);
3696 fprintf (f, " function %s/%i parameter descriptors:\n",
3697 node->name (), node->order);
3698 count = ipa_get_param_count (info);
3699 for (i = 0; i < count; i++)
3700 {
3701 int c;
3702
3703 fprintf (f, " ");
3704 ipa_dump_param (f, info, i);
3705 if (ipa_is_param_used (info, i))
3706 fprintf (f, " used");
3707 c = ipa_get_controlled_uses (info, i);
3708 if (c == IPA_UNDESCRIBED_USE)
3709 fprintf (f, " undescribed_use");
3710 else
3711 fprintf (f, " controlled_uses=%i", c);
3712 fprintf (f, "\n");
3713 }
3714 }
3715
3716 /* Print ipa_tree_map data structures of all functions in the
3717 callgraph to F. */
3718
3719 void
3720 ipa_print_all_params (FILE * f)
3721 {
3722 struct cgraph_node *node;
3723
3724 fprintf (f, "\nFunction parameters:\n");
3725 FOR_EACH_FUNCTION (node)
3726 ipa_print_node_params (f, node);
3727 }
3728
3729 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3730
3731 vec<tree>
3732 ipa_get_vector_of_formal_parms (tree fndecl)
3733 {
3734 vec<tree> args;
3735 int count;
3736 tree parm;
3737
3738 gcc_assert (!flag_wpa);
3739 count = count_formal_params (fndecl);
3740 args.create (count);
3741 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3742 args.quick_push (parm);
3743
3744 return args;
3745 }
3746
3747 /* Return a heap allocated vector containing types of formal parameters of
3748 function type FNTYPE. */
3749
3750 vec<tree>
3751 ipa_get_vector_of_formal_parm_types (tree fntype)
3752 {
3753 vec<tree> types;
3754 int count = 0;
3755 tree t;
3756
3757 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3758 count++;
3759
3760 types.create (count);
3761 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3762 types.quick_push (TREE_VALUE (t));
3763
3764 return types;
3765 }
3766
3767 /* Modify the function declaration FNDECL and its type according to the plan in
3768 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3769 to reflect the actual parameters being modified which are determined by the
3770 base_index field. */
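/* As a purely illustrative example: with the adjustment vector
   {copy of base_index 0, removal of base_index 1}, a declaration
   void f (int a, int b) is rewritten to void f (int a), DECL_ARGUMENTS
   is re-chained accordingly and TYPE_ARG_TYPES is rebuilt to match.  */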
3771
3772 void
3773 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3774 {
3775 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3776 tree orig_type = TREE_TYPE (fndecl);
3777 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3778
3779 /* The following test is an ugly hack; some functions simply don't have any
3780 arguments in their type. This is probably a bug but well... */
3781 bool care_for_types = (old_arg_types != NULL_TREE);
3782 bool last_parm_void;
3783 vec<tree> otypes;
3784 if (care_for_types)
3785 {
3786 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3787 == void_type_node);
3788 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3789 if (last_parm_void)
3790 gcc_assert (oparms.length () + 1 == otypes.length ());
3791 else
3792 gcc_assert (oparms.length () == otypes.length ());
3793 }
3794 else
3795 {
3796 last_parm_void = false;
3797 otypes.create (0);
3798 }
3799
3800 int len = adjustments.length ();
3801 tree *link = &DECL_ARGUMENTS (fndecl);
3802 tree new_arg_types = NULL;
3803 for (int i = 0; i < len; i++)
3804 {
3805 struct ipa_parm_adjustment *adj;
3806 gcc_assert (link);
3807
3808 adj = &adjustments[i];
3809 tree parm;
3810 if (adj->op == IPA_PARM_OP_NEW)
3811 parm = NULL;
3812 else
3813 parm = oparms[adj->base_index];
3814 adj->base = parm;
3815
3816 if (adj->op == IPA_PARM_OP_COPY)
3817 {
3818 if (care_for_types)
3819 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3820 new_arg_types);
3821 *link = parm;
3822 link = &DECL_CHAIN (parm);
3823 }
3824 else if (adj->op != IPA_PARM_OP_REMOVE)
3825 {
3826 tree new_parm;
3827 tree ptype;
3828
3829 if (adj->by_ref)
3830 ptype = build_pointer_type (adj->type);
3831 else
3832 {
3833 ptype = adj->type;
3834 if (is_gimple_reg_type (ptype))
3835 {
3836 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3837 if (TYPE_ALIGN (ptype) < malign)
3838 ptype = build_aligned_type (ptype, malign);
3839 }
3840 }
3841
3842 if (care_for_types)
3843 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3844
3845 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3846 ptype);
3847 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3848 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3849 DECL_ARTIFICIAL (new_parm) = 1;
3850 DECL_ARG_TYPE (new_parm) = ptype;
3851 DECL_CONTEXT (new_parm) = fndecl;
3852 TREE_USED (new_parm) = 1;
3853 DECL_IGNORED_P (new_parm) = 1;
3854 layout_decl (new_parm, 0);
3855
3856 if (adj->op == IPA_PARM_OP_NEW)
3857 adj->base = NULL;
3858 else
3859 adj->base = parm;
3860 adj->new_decl = new_parm;
3861
3862 *link = new_parm;
3863 link = &DECL_CHAIN (new_parm);
3864 }
3865 }
3866
3867 *link = NULL_TREE;
3868
3869 tree new_reversed = NULL;
3870 if (care_for_types)
3871 {
3872 new_reversed = nreverse (new_arg_types);
3873 if (last_parm_void)
3874 {
3875 if (new_reversed)
3876 TREE_CHAIN (new_arg_types) = void_list_node;
3877 else
3878 new_reversed = void_list_node;
3879 }
3880 }
3881
3882 /* Use copy_node to preserve as much as possible from the original type
3883 (debug info, attribute lists etc.).
3884 The exception is that METHOD_TYPEs must have a THIS argument.
3885 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3886 instead. */
3887 tree new_type = NULL;
3888 if (TREE_CODE (orig_type) != METHOD_TYPE
3889 || (adjustments[0].op == IPA_PARM_OP_COPY
3890 && adjustments[0].base_index == 0))
3891 {
3892 new_type = build_distinct_type_copy (orig_type);
3893 TYPE_ARG_TYPES (new_type) = new_reversed;
3894 }
3895 else
3896 {
3897 new_type
3898 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3899 new_reversed));
3900 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3901 DECL_VINDEX (fndecl) = NULL_TREE;
3902 }
3903
3904 /* When the signature changes, we need to clear the builtin info. */
3905 if (DECL_BUILT_IN (fndecl))
3906 {
3907 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3908 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3909 }
3910
3911 TREE_TYPE (fndecl) = new_type;
3912 DECL_VIRTUAL_P (fndecl) = 0;
3913 DECL_LANG_SPECIFIC (fndecl) = NULL;
3914 otypes.release ();
3915 oparms.release ();
3916 }
3917
3918 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3919 If this is a directly recursive call, CS must be NULL. Otherwise it must
3920 contain the corresponding call graph edge. */
3921
3922 void
3923 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3924 ipa_parm_adjustment_vec adjustments)
3925 {
3926 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3927 vec<tree> vargs;
3928 vec<tree, va_gc> **debug_args = NULL;
3929 gcall *new_stmt;
3930 gimple_stmt_iterator gsi, prev_gsi;
3931 tree callee_decl;
3932 int i, len;
3933
3934 len = adjustments.length ();
3935 vargs.create (len);
3936 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3937 current_node->remove_stmt_references (stmt);
3938
3939 gsi = gsi_for_stmt (stmt);
3940 prev_gsi = gsi;
3941 gsi_prev (&prev_gsi);
3942 for (i = 0; i < len; i++)
3943 {
3944 struct ipa_parm_adjustment *adj;
3945
3946 adj = &adjustments[i];
3947
3948 if (adj->op == IPA_PARM_OP_COPY)
3949 {
3950 tree arg = gimple_call_arg (stmt, adj->base_index);
3951
3952 vargs.quick_push (arg);
3953 }
3954 else if (adj->op != IPA_PARM_OP_REMOVE)
3955 {
3956 tree expr, base, off;
3957 location_t loc;
3958 unsigned int deref_align = 0;
3959 bool deref_base = false;
3960
3961 /* We create a new parameter out of the value of the old one; we can
3962 do the following kinds of transformations:
3963
3964 - A scalar passed by reference is converted to a scalar passed by
3965 value. (adj->by_ref is false and the type of the original
3966 actual argument is a pointer to a scalar).
3967
3968 - A part of an aggregate is passed instead of the whole aggregate.
3969 The part can be passed either by value or by reference, this is
3970 determined by value of adj->by_ref. Moreover, the code below
3971 handles both situations when the original aggregate is passed by
3972 value (its type is not a pointer) and when it is passed by
3973 reference (it is a pointer to an aggregate).
3974
3975 When the new argument is passed by reference (adj->by_ref is true)
3976 it must be a part of an aggregate and therefore we form it by
3977 simply taking the address of a reference inside the original
3978 aggregate. */
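/* As an illustration with hypothetical names: if the callee only uses
   field FLD of the structure the actual argument P points to, an
   adjustment with by_ref false loads MEM_REF <P, byte offset of FLD>
   and passes the scalar, while with by_ref true the address of that
   MEM_REF is passed instead.  */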
3979
3980 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3981 base = gimple_call_arg (stmt, adj->base_index);
3982 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3983 : EXPR_LOCATION (base);
3984
3985 if (TREE_CODE (base) != ADDR_EXPR
3986 && POINTER_TYPE_P (TREE_TYPE (base)))
3987 off = build_int_cst (adj->alias_ptr_type,
3988 adj->offset / BITS_PER_UNIT);
3989 else
3990 {
3991 HOST_WIDE_INT base_offset;
3992 tree prev_base;
3993 bool addrof;
3994
3995 if (TREE_CODE (base) == ADDR_EXPR)
3996 {
3997 base = TREE_OPERAND (base, 0);
3998 addrof = true;
3999 }
4000 else
4001 addrof = false;
4002 prev_base = base;
4003 base = get_addr_base_and_unit_offset (base, &base_offset);
4004 /* Aggregate arguments can have non-invariant addresses. */
4005 if (!base)
4006 {
4007 base = build_fold_addr_expr (prev_base);
4008 off = build_int_cst (adj->alias_ptr_type,
4009 adj->offset / BITS_PER_UNIT);
4010 }
4011 else if (TREE_CODE (base) == MEM_REF)
4012 {
4013 if (!addrof)
4014 {
4015 deref_base = true;
4016 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4017 }
4018 off = build_int_cst (adj->alias_ptr_type,
4019 base_offset
4020 + adj->offset / BITS_PER_UNIT);
4021 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4022 off);
4023 base = TREE_OPERAND (base, 0);
4024 }
4025 else
4026 {
4027 off = build_int_cst (adj->alias_ptr_type,
4028 base_offset
4029 + adj->offset / BITS_PER_UNIT);
4030 base = build_fold_addr_expr (base);
4031 }
4032 }
4033
4034 if (!adj->by_ref)
4035 {
4036 tree type = adj->type;
4037 unsigned int align;
4038 unsigned HOST_WIDE_INT misalign;
4039
4040 if (deref_base)
4041 {
4042 align = deref_align;
4043 misalign = 0;
4044 }
4045 else
4046 {
4047 get_pointer_alignment_1 (base, &align, &misalign);
4048 if (TYPE_ALIGN (type) > align)
4049 align = TYPE_ALIGN (type);
4050 }
4051 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4052 * BITS_PER_UNIT);
4053 misalign = misalign & (align - 1);
4054 if (misalign != 0)
4055 align = (misalign & -misalign);
4056 if (align < TYPE_ALIGN (type))
4057 type = build_aligned_type (type, align);
4058 base = force_gimple_operand_gsi (&gsi, base,
4059 true, NULL, true, GSI_SAME_STMT);
4060 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4061 /* If expr is not a valid gimple call argument, emit
4062 a load into a temporary. */
4063 if (is_gimple_reg_type (TREE_TYPE (expr)))
4064 {
4065 gimple tem = gimple_build_assign (NULL_TREE, expr);
4066 if (gimple_in_ssa_p (cfun))
4067 {
4068 gimple_set_vuse (tem, gimple_vuse (stmt));
4069 expr = make_ssa_name (TREE_TYPE (expr), tem);
4070 }
4071 else
4072 expr = create_tmp_reg (TREE_TYPE (expr));
4073 gimple_assign_set_lhs (tem, expr);
4074 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4075 }
4076 }
4077 else
4078 {
4079 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4080 expr = build_fold_addr_expr (expr);
4081 expr = force_gimple_operand_gsi (&gsi, expr,
4082 true, NULL, true, GSI_SAME_STMT);
4083 }
4084 vargs.quick_push (expr);
4085 }
4086 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4087 {
4088 unsigned int ix;
4089 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4090 gimple def_temp;
4091
4092 arg = gimple_call_arg (stmt, adj->base_index);
4093 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4094 {
4095 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4096 continue;
4097 arg = fold_convert_loc (gimple_location (stmt),
4098 TREE_TYPE (origin), arg);
4099 }
4100 if (debug_args == NULL)
4101 debug_args = decl_debug_args_insert (callee_decl);
4102 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4103 if (ddecl == origin)
4104 {
4105 ddecl = (**debug_args)[ix + 1];
4106 break;
4107 }
4108 if (ddecl == NULL)
4109 {
4110 ddecl = make_node (DEBUG_EXPR_DECL);
4111 DECL_ARTIFICIAL (ddecl) = 1;
4112 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4113 DECL_MODE (ddecl) = DECL_MODE (origin);
4114
4115 vec_safe_push (*debug_args, origin);
4116 vec_safe_push (*debug_args, ddecl);
4117 }
4118 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4119 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4120 }
4121 }
4122
4123 if (dump_file && (dump_flags & TDF_DETAILS))
4124 {
4125 fprintf (dump_file, "replacing stmt:");
4126 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4127 }
4128
4129 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4130 vargs.release ();
4131 if (gimple_call_lhs (stmt))
4132 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4133
4134 gimple_set_block (new_stmt, gimple_block (stmt));
4135 if (gimple_has_location (stmt))
4136 gimple_set_location (new_stmt, gimple_location (stmt));
4137 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4138 gimple_call_copy_flags (new_stmt, stmt);
4139 if (gimple_in_ssa_p (cfun))
4140 {
4141 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4142 if (gimple_vdef (stmt))
4143 {
4144 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4145 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4146 }
4147 }
4148
4149 if (dump_file && (dump_flags & TDF_DETAILS))
4150 {
4151 fprintf (dump_file, "with stmt:");
4152 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4153 fprintf (dump_file, "\n");
4154 }
4155 gsi_replace (&gsi, new_stmt, true);
4156 if (cs)
4157 cs->set_call_stmt (new_stmt);
4158 do
4159 {
4160 current_node->record_stmt_references (gsi_stmt (gsi));
4161 gsi_prev (&gsi);
4162 }
4163 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4164 }
4165
4166 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4167 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4168 specifies whether the function should care about type incompatibility between
4169 the current and new expressions. If it is false, the function will leave
4170 incompatibility issues to the caller. Return true iff the expression
4171 was modified. */
4172
4173 bool
4174 ipa_modify_expr (tree *expr, bool convert,
4175 ipa_parm_adjustment_vec adjustments)
4176 {
4177 struct ipa_parm_adjustment *cand
4178 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4179 if (!cand)
4180 return false;
4181
4182 tree src;
4183 if (cand->by_ref)
4184 src = build_simple_mem_ref (cand->new_decl);
4185 else
4186 src = cand->new_decl;
4187
4188 if (dump_file && (dump_flags & TDF_DETAILS))
4189 {
4190 fprintf (dump_file, "About to replace expr ");
4191 print_generic_expr (dump_file, *expr, 0);
4192 fprintf (dump_file, " with ");
4193 print_generic_expr (dump_file, src, 0);
4194 fprintf (dump_file, "\n");
4195 }
4196
4197 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4198 {
4199 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4200 *expr = vce;
4201 }
4202 else
4203 *expr = src;
4204 return true;
4205 }
4206
4207 /* If T is an SSA_NAME, return NULL if it is not a default def or
4208 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4209 the base variable is always returned, regardless if it is a default
4210 def. Return T if it is not an SSA_NAME. */
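/* For example, for the default definition p_1(D) of a PARM_DECL p this
   returns p, whereas for a non-default SSA name it returns NULL_TREE
   unless IGNORE_DEFAULT_DEF is set.  */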
4211
4212 static tree
4213 get_ssa_base_param (tree t, bool ignore_default_def)
4214 {
4215 if (TREE_CODE (t) == SSA_NAME)
4216 {
4217 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4218 return SSA_NAME_VAR (t);
4219 else
4220 return NULL_TREE;
4221 }
4222 return t;
4223 }
4224
4225 /* Given an expression, return an adjustment entry specifying the
4226 transformation to be done on EXPR. If no suitable adjustment entry
4227 was found, returns NULL.
4228
4229 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4230 default def, otherwise bail on them.
4231
4232 If CONVERT is non-NULL, this function will set *CONVERT if the
4233 expression provided is a component reference. ADJUSTMENTS is the
4234 adjustments vector. */
4235
4236 ipa_parm_adjustment *
4237 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4238 ipa_parm_adjustment_vec adjustments,
4239 bool ignore_default_def)
4240 {
4241 if (TREE_CODE (**expr) == BIT_FIELD_REF
4242 || TREE_CODE (**expr) == IMAGPART_EXPR
4243 || TREE_CODE (**expr) == REALPART_EXPR)
4244 {
4245 *expr = &TREE_OPERAND (**expr, 0);
4246 if (convert)
4247 *convert = true;
4248 }
4249
4250 HOST_WIDE_INT offset, size, max_size;
4251 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4252 if (!base || size == -1 || max_size == -1)
4253 return NULL;
4254
4255 if (TREE_CODE (base) == MEM_REF)
4256 {
4257 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4258 base = TREE_OPERAND (base, 0);
4259 }
4260
4261 base = get_ssa_base_param (base, ignore_default_def);
4262 if (!base || TREE_CODE (base) != PARM_DECL)
4263 return NULL;
4264
4265 struct ipa_parm_adjustment *cand = NULL;
4266 unsigned int len = adjustments.length ();
4267 for (unsigned i = 0; i < len; i++)
4268 {
4269 struct ipa_parm_adjustment *adj = &adjustments[i];
4270
4271 if (adj->base == base
4272 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4273 {
4274 cand = adj;
4275 break;
4276 }
4277 }
4278
4279 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4280 return NULL;
4281 return cand;
4282 }
4283
4284 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4285
4286 static bool
4287 index_in_adjustments_multiple_times_p (int base_index,
4288 ipa_parm_adjustment_vec adjustments)
4289 {
4290 int i, len = adjustments.length ();
4291 bool one = false;
4292
4293 for (i = 0; i < len; i++)
4294 {
4295 struct ipa_parm_adjustment *adj;
4296 adj = &adjustments[i];
4297
4298 if (adj->base_index == base_index)
4299 {
4300 if (one)
4301 return true;
4302 else
4303 one = true;
4304 }
4305 }
4306 return false;
4307 }
4308
4309
4310 /* Return adjustments that should have the same effect on function parameters
4311 and call arguments as if they were first changed according to adjustments in
4312 INNER and then by adjustments in OUTER. */
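/* An illustrative example: if INNER turned parameters (a, b, c) into
   (a, c) by removing b, and OUTER then copies its parameter 0 and
   removes its parameter 1, the combined vector copies the original a
   and removes both b and c.  */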
4313
4314 ipa_parm_adjustment_vec
4315 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4316 ipa_parm_adjustment_vec outer)
4317 {
4318 int i, outlen = outer.length ();
4319 int inlen = inner.length ();
4320 int removals = 0;
4321 ipa_parm_adjustment_vec adjustments, tmp;
4322
4323 tmp.create (inlen);
4324 for (i = 0; i < inlen; i++)
4325 {
4326 struct ipa_parm_adjustment *n;
4327 n = &inner[i];
4328
4329 if (n->op == IPA_PARM_OP_REMOVE)
4330 removals++;
4331 else
4332 {
4333 /* FIXME: Handling of new arguments is not implemented yet. */
4334 gcc_assert (n->op != IPA_PARM_OP_NEW);
4335 tmp.quick_push (*n);
4336 }
4337 }
4338
4339 adjustments.create (outlen + removals);
4340 for (i = 0; i < outlen; i++)
4341 {
4342 struct ipa_parm_adjustment r;
4343 struct ipa_parm_adjustment *out = &outer[i];
4344 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4345
4346 memset (&r, 0, sizeof (r));
4347 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4348 if (out->op == IPA_PARM_OP_REMOVE)
4349 {
4350 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4351 {
4352 r.op = IPA_PARM_OP_REMOVE;
4353 adjustments.quick_push (r);
4354 }
4355 continue;
4356 }
4357 else
4358 {
4359 /* FIXME: Handling of new arguments is not implemented yet. */
4360 gcc_assert (out->op != IPA_PARM_OP_NEW);
4361 }
4362
4363 r.base_index = in->base_index;
4364 r.type = out->type;
4365
4366 /* FIXME: Create nonlocal value too. */
4367
4368 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4369 r.op = IPA_PARM_OP_COPY;
4370 else if (in->op == IPA_PARM_OP_COPY)
4371 r.offset = out->offset;
4372 else if (out->op == IPA_PARM_OP_COPY)
4373 r.offset = in->offset;
4374 else
4375 r.offset = in->offset + out->offset;
4376 adjustments.quick_push (r);
4377 }
4378
4379 for (i = 0; i < inlen; i++)
4380 {
4381 struct ipa_parm_adjustment *n = &inner[i];
4382
4383 if (n->op == IPA_PARM_OP_REMOVE)
4384 adjustments.quick_push (*n);
4385 }
4386
4387 tmp.release ();
4388 return adjustments;
4389 }
4390
4391 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4392 human-friendly way, assuming they are meant to be applied to FNDECL. */
4393
4394 void
4395 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4396 tree fndecl)
4397 {
4398 int i, len = adjustments.length ();
4399 bool first = true;
4400 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4401
4402 fprintf (file, "IPA param adjustments: ");
4403 for (i = 0; i < len; i++)
4404 {
4405 struct ipa_parm_adjustment *adj;
4406 adj = &adjustments[i];
4407
4408 if (!first)
4409 fprintf (file, " ");
4410 else
4411 first = false;
4412
4413 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4414 print_generic_expr (file, parms[adj->base_index], 0);
4415 if (adj->base)
4416 {
4417 fprintf (file, ", base: ");
4418 print_generic_expr (file, adj->base, 0);
4419 }
4420 if (adj->new_decl)
4421 {
4422 fprintf (file, ", new_decl: ");
4423 print_generic_expr (file, adj->new_decl, 0);
4424 }
4425 if (adj->new_ssa_base)
4426 {
4427 fprintf (file, ", new_ssa_base: ");
4428 print_generic_expr (file, adj->new_ssa_base, 0);
4429 }
4430
4431 if (adj->op == IPA_PARM_OP_COPY)
4432 fprintf (file, ", copy_param");
4433 else if (adj->op == IPA_PARM_OP_REMOVE)
4434 fprintf (file, ", remove_param");
4435 else
4436 fprintf (file, ", offset %li", (long) adj->offset);
4437 if (adj->by_ref)
4438 fprintf (file, ", by_ref");
4439 print_node_brief (file, ", type: ", adj->type, 0);
4440 fprintf (file, "\n");
4441 }
4442 parms.release ();
4443 }
4444
4445 /* Dump the linked list of aggregate replacement values AV to F. */
4446
4447 void
4448 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4449 {
4450 bool comma = false;
4451 fprintf (f, " Aggregate replacements:");
4452 for (; av; av = av->next)
4453 {
4454 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4455 av->index, av->offset);
4456 print_generic_expr (f, av->value, 0);
4457 comma = true;
4458 }
4459 fprintf (f, "\n");
4460 }
4461
4462 /* Stream out jump function JUMP_FUNC to OB. */
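/* The layout written here must stay in sync with ipa_read_jump_function:
   a type tag, the type-specific payload, the number of aggregate items
   together with a by_ref bitpack, the items themselves and finally the
   alignment bitpack.  */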
4463
4464 static void
4465 ipa_write_jump_function (struct output_block *ob,
4466 struct ipa_jump_func *jump_func)
4467 {
4468 struct ipa_agg_jf_item *item;
4469 struct bitpack_d bp;
4470 int i, count;
4471
4472 streamer_write_uhwi (ob, jump_func->type);
4473 switch (jump_func->type)
4474 {
4475 case IPA_JF_UNKNOWN:
4476 break;
4477 case IPA_JF_CONST:
4478 gcc_assert (
4479 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4480 stream_write_tree (ob, jump_func->value.constant.value, true);
4481 break;
4482 case IPA_JF_PASS_THROUGH:
4483 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4484 if (jump_func->value.pass_through.operation == NOP_EXPR)
4485 {
4486 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4487 bp = bitpack_create (ob->main_stream);
4488 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4489 streamer_write_bitpack (&bp);
4490 }
4491 else
4492 {
4493 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4494 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4495 }
4496 break;
4497 case IPA_JF_ANCESTOR:
4498 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4499 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4500 bp = bitpack_create (ob->main_stream);
4501 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4502 streamer_write_bitpack (&bp);
4503 break;
4504 }
4505
4506 count = vec_safe_length (jump_func->agg.items);
4507 streamer_write_uhwi (ob, count);
4508 if (count)
4509 {
4510 bp = bitpack_create (ob->main_stream);
4511 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4512 streamer_write_bitpack (&bp);
4513 }
4514
4515 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4516 {
4517 streamer_write_uhwi (ob, item->offset);
4518 stream_write_tree (ob, item->value, true);
4519 }
4520
4521 bp = bitpack_create (ob->main_stream);
4522 bp_pack_value (&bp, jump_func->alignment.known, 1);
4523 streamer_write_bitpack (&bp);
4524 if (jump_func->alignment.known)
4525 {
4526 streamer_write_uhwi (ob, jump_func->alignment.align);
4527 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4528 }
4529 }
4530
4531 /* Read in jump function JUMP_FUNC from IB. */
4532
4533 static void
4534 ipa_read_jump_function (struct lto_input_block *ib,
4535 struct ipa_jump_func *jump_func,
4536 struct cgraph_edge *cs,
4537 struct data_in *data_in)
4538 {
4539 enum jump_func_type jftype;
4540 enum tree_code operation;
4541 int i, count;
4542
4543 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4544 switch (jftype)
4545 {
4546 case IPA_JF_UNKNOWN:
4547 ipa_set_jf_unknown (jump_func);
4548 break;
4549 case IPA_JF_CONST:
4550 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4551 break;
4552 case IPA_JF_PASS_THROUGH:
4553 operation = (enum tree_code) streamer_read_uhwi (ib);
4554 if (operation == NOP_EXPR)
4555 {
4556 int formal_id = streamer_read_uhwi (ib);
4557 struct bitpack_d bp = streamer_read_bitpack (ib);
4558 bool agg_preserved = bp_unpack_value (&bp, 1);
4559 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4560 }
4561 else
4562 {
4563 tree operand = stream_read_tree (ib, data_in);
4564 int formal_id = streamer_read_uhwi (ib);
4565 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4566 operation);
4567 }
4568 break;
4569 case IPA_JF_ANCESTOR:
4570 {
4571 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4572 int formal_id = streamer_read_uhwi (ib);
4573 struct bitpack_d bp = streamer_read_bitpack (ib);
4574 bool agg_preserved = bp_unpack_value (&bp, 1);
4575 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4576 break;
4577 }
4578 }
4579
4580 count = streamer_read_uhwi (ib);
4581 vec_alloc (jump_func->agg.items, count);
4582 if (count)
4583 {
4584 struct bitpack_d bp = streamer_read_bitpack (ib);
4585 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4586 }
4587 for (i = 0; i < count; i++)
4588 {
4589 struct ipa_agg_jf_item item;
4590 item.offset = streamer_read_uhwi (ib);
4591 item.value = stream_read_tree (ib, data_in);
4592 jump_func->agg.items->quick_push (item);
4593 }
4594
4595 struct bitpack_d bp = streamer_read_bitpack (ib);
4596 bool alignment_known = bp_unpack_value (&bp, 1);
4597 if (alignment_known)
4598 {
4599 jump_func->alignment.known = true;
4600 jump_func->alignment.align = streamer_read_uhwi (ib);
4601 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4602 }
4603 else
4604 jump_func->alignment.known = false;
4605 }
4606
4607 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4608 relevant to indirect inlining to OB. */
4609
4610 static void
4611 ipa_write_indirect_edge_info (struct output_block *ob,
4612 struct cgraph_edge *cs)
4613 {
4614 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4615 struct bitpack_d bp;
4616
4617 streamer_write_hwi (ob, ii->param_index);
4618 bp = bitpack_create (ob->main_stream);
4619 bp_pack_value (&bp, ii->polymorphic, 1);
4620 bp_pack_value (&bp, ii->agg_contents, 1);
4621 bp_pack_value (&bp, ii->member_ptr, 1);
4622 bp_pack_value (&bp, ii->by_ref, 1);
4623 bp_pack_value (&bp, ii->vptr_changed, 1);
4624 streamer_write_bitpack (&bp);
4625 if (ii->agg_contents || ii->polymorphic)
4626 streamer_write_hwi (ob, ii->offset);
4627 else
4628 gcc_assert (ii->offset == 0);
4629
4630 if (ii->polymorphic)
4631 {
4632 streamer_write_hwi (ob, ii->otr_token);
4633 stream_write_tree (ob, ii->otr_type, true);
4634 ii->context.stream_out (ob);
4635 }
4636 }
4637
4638 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4639 relevant to indirect inlining from IB. */
4640
4641 static void
4642 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4643 struct data_in *data_in,
4644 struct cgraph_edge *cs)
4645 {
4646 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4647 struct bitpack_d bp;
4648
4649 ii->param_index = (int) streamer_read_hwi (ib);
4650 bp = streamer_read_bitpack (ib);
4651 ii->polymorphic = bp_unpack_value (&bp, 1);
4652 ii->agg_contents = bp_unpack_value (&bp, 1);
4653 ii->member_ptr = bp_unpack_value (&bp, 1);
4654 ii->by_ref = bp_unpack_value (&bp, 1);
4655 ii->vptr_changed = bp_unpack_value (&bp, 1);
4656 if (ii->agg_contents || ii->polymorphic)
4657 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4658 else
4659 ii->offset = 0;
4660 if (ii->polymorphic)
4661 {
4662 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4663 ii->otr_type = stream_read_tree (ib, data_in);
4664 ii->context.stream_in (ib, data_in);
4665 }
4666 }
4667
4668 /* Stream out NODE info to OB. */
4669
4670 static void
4671 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4672 {
4673 int node_ref;
4674 lto_symtab_encoder_t encoder;
4675 struct ipa_node_params *info = IPA_NODE_REF (node);
4676 int j;
4677 struct cgraph_edge *e;
4678 struct bitpack_d bp;
4679
4680 encoder = ob->decl_state->symtab_node_encoder;
4681 node_ref = lto_symtab_encoder_encode (encoder, node);
4682 streamer_write_uhwi (ob, node_ref);
4683
4684 streamer_write_uhwi (ob, ipa_get_param_count (info));
4685 for (j = 0; j < ipa_get_param_count (info); j++)
4686 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4687 bp = bitpack_create (ob->main_stream);
4688 gcc_assert (info->analysis_done
4689 || ipa_get_param_count (info) == 0);
4690 gcc_assert (!info->node_enqueued);
4691 gcc_assert (!info->ipcp_orig_node);
4692 for (j = 0; j < ipa_get_param_count (info); j++)
4693 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4694 streamer_write_bitpack (&bp);
4695 for (j = 0; j < ipa_get_param_count (info); j++)
4696 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4697 for (e = node->callees; e; e = e->next_callee)
4698 {
4699 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4700
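/* The streamed count encodes two things at once: twice the number of
   jump functions, with the least significant bit set when polymorphic
   call contexts follow them; the reader recovers both as count & 1 and
   count / 2.  */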
4701 streamer_write_uhwi (ob,
4702 ipa_get_cs_argument_count (args) * 2
4703 + (args->polymorphic_call_contexts != NULL));
4704 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4705 {
4706 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4707 if (args->polymorphic_call_contexts != NULL)
4708 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4709 }
4710 }
4711 for (e = node->indirect_calls; e; e = e->next_callee)
4712 {
4713 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4714
4715 streamer_write_uhwi (ob,
4716 ipa_get_cs_argument_count (args) * 2
4717 + (args->polymorphic_call_contexts != NULL));
4718 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4719 {
4720 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4721 if (args->polymorphic_call_contexts != NULL)
4722 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4723 }
4724 ipa_write_indirect_edge_info (ob, e);
4725 }
4726 }
4727
4728 /* Stream in NODE info from IB. */
4729
4730 static void
4731 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4732 struct data_in *data_in)
4733 {
4734 struct ipa_node_params *info = IPA_NODE_REF (node);
4735 int k;
4736 struct cgraph_edge *e;
4737 struct bitpack_d bp;
4738
4739 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4740
4741 for (k = 0; k < ipa_get_param_count (info); k++)
4742 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4743
4744 bp = streamer_read_bitpack (ib);
4745 if (ipa_get_param_count (info) != 0)
4746 info->analysis_done = true;
4747 info->node_enqueued = false;
4748 for (k = 0; k < ipa_get_param_count (info); k++)
4749 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4750 for (k = 0; k < ipa_get_param_count (info); k++)
4751 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4752 for (e = node->callees; e; e = e->next_callee)
4753 {
4754 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4755 int count = streamer_read_uhwi (ib);
4756 bool contexts_computed = count & 1;
4757 count /= 2;
4758
4759 if (!count)
4760 continue;
4761 vec_safe_grow_cleared (args->jump_functions, count);
4762 if (contexts_computed)
4763 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4764
4765 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4766 {
4767 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4768 data_in);
4769 if (contexts_computed)
4770 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4771 }
4772 }
4773 for (e = node->indirect_calls; e; e = e->next_callee)
4774 {
4775 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4776 int count = streamer_read_uhwi (ib);
4777 bool contexts_computed = count & 1;
4778 count /= 2;
4779
4780 if (count)
4781 {
4782 vec_safe_grow_cleared (args->jump_functions, count);
4783 if (contexts_computed)
4784 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4785 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4786 {
4787 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4788 data_in);
4789 if (contexts_computed)
4790 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4791 }
4792 }
4793 ipa_read_indirect_edge_info (ib, data_in, e);
4794 }
4795 }
4796
4797 /* Write jump functions of all functions in the current partition. */
4798
4799 void
4800 ipa_prop_write_jump_functions (void)
4801 {
4802 struct cgraph_node *node;
4803 struct output_block *ob;
4804 unsigned int count = 0;
4805 lto_symtab_encoder_iterator lsei;
4806 lto_symtab_encoder_t encoder;
4807
4808 if (!ipa_node_params_sum)
4809 return;
4810
4811 ob = create_output_block (LTO_section_jump_functions);
4812 encoder = ob->decl_state->symtab_node_encoder;
4813 ob->symbol = NULL;
4814 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4815 lsei_next_function_in_partition (&lsei))
4816 {
4817 node = lsei_cgraph_node (lsei);
4818 if (node->has_gimple_body_p ()
4819 && IPA_NODE_REF (node) != NULL)
4820 count++;
4821 }
4822
4823 streamer_write_uhwi (ob, count);
4824
4825 /* Process all of the functions. */
4826 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4827 lsei_next_function_in_partition (&lsei))
4828 {
4829 node = lsei_cgraph_node (lsei);
4830 if (node->has_gimple_body_p ()
4831 && IPA_NODE_REF (node) != NULL)
4832 ipa_write_node_info (ob, node);
4833 }
4834 streamer_write_char_stream (ob->main_stream, 0);
4835 produce_asm (ob, NULL);
4836 destroy_output_block (ob);
4837 }
4838
4839 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
4840
4841 static void
4842 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4843 size_t len)
4844 {
4845 const struct lto_function_header *header =
4846 (const struct lto_function_header *) data;
4847 const int cfg_offset = sizeof (struct lto_function_header);
4848 const int main_offset = cfg_offset + header->cfg_size;
4849 const int string_offset = main_offset + header->main_size;
4850 struct data_in *data_in;
4851 unsigned int i;
4852 unsigned int count;
4853
4854 lto_input_block ib_main ((const char *) data + main_offset,
4855 header->main_size, file_data->mode_table);
4856
4857 data_in =
4858 lto_data_in_create (file_data, (const char *) data + string_offset,
4859 header->string_size, vNULL);
4860 count = streamer_read_uhwi (&ib_main);
4861
4862 for (i = 0; i < count; i++)
4863 {
4864 unsigned int index;
4865 struct cgraph_node *node;
4866 lto_symtab_encoder_t encoder;
4867
4868 index = streamer_read_uhwi (&ib_main);
4869 encoder = file_data->symtab_node_encoder;
4870 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4871 index));
4872 gcc_assert (node->definition);
4873 ipa_read_node_info (&ib_main, node, data_in);
4874 }
4875 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4876 len);
4877 lto_data_in_delete (data_in);
4878 }
4879
4880 /* Read ipcp jump functions. */
4881
4882 void
4883 ipa_prop_read_jump_functions (void)
4884 {
4885 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4886 struct lto_file_decl_data *file_data;
4887 unsigned int j = 0;
4888
4889 ipa_check_create_node_params ();
4890 ipa_check_create_edge_args ();
4891 ipa_register_cgraph_hooks ();
4892
4893 while ((file_data = file_data_vec[j++]))
4894 {
4895 size_t len;
4896 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4897
4898 if (data)
4899 ipa_prop_read_section (file_data, data, len);
4900 }
4901 }
4902
4903 /* After merging units, we can get a mismatch in argument counts.
4904 Decl merging might also have rendered parameter lists obsolete.
4905 Also compute called_with_variable_arg info. */
4906
4907 void
4908 ipa_update_after_lto_read (void)
4909 {
4910 ipa_check_create_node_params ();
4911 ipa_check_create_edge_args ();
4912 }
4913
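/* Stream out the aggregate value replacement chain and the parameter
   alignments recorded for NODE to OB.  */
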
4914 void
4915 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4916 {
4917 int node_ref;
4918 unsigned int count = 0;
4919 lto_symtab_encoder_t encoder;
4920 struct ipa_agg_replacement_value *aggvals, *av;
4921
4922 aggvals = ipa_get_agg_replacements_for_node (node);
4923 encoder = ob->decl_state->symtab_node_encoder;
4924 node_ref = lto_symtab_encoder_encode (encoder, node);
4925 streamer_write_uhwi (ob, node_ref);
4926
4927 for (av = aggvals; av; av = av->next)
4928 count++;
4929 streamer_write_uhwi (ob, count);
4930
4931 for (av = aggvals; av; av = av->next)
4932 {
4933 struct bitpack_d bp;
4934
4935 streamer_write_uhwi (ob, av->offset);
4936 streamer_write_uhwi (ob, av->index);
4937 stream_write_tree (ob, av->value, true);
4938
4939 bp = bitpack_create (ob->main_stream);
4940 bp_pack_value (&bp, av->by_ref, 1);
4941 streamer_write_bitpack (&bp);
4942 }
4943
4944 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4945 if (ts && vec_safe_length (ts->alignments) > 0)
4946 {
4947 count = ts->alignments->length ();
4948
4949 streamer_write_uhwi (ob, count);
4950 for (unsigned i = 0; i < count; ++i)
4951 {
4952 ipa_alignment *parm_al = &(*ts->alignments)[i];
4953
4954 struct bitpack_d bp;
4955 bp = bitpack_create (ob->main_stream);
4956 bp_pack_value (&bp, parm_al->known, 1);
4957 streamer_write_bitpack (&bp);
4958 if (parm_al->known)
4959 {
4960 streamer_write_uhwi (ob, parm_al->align);
4961 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4962 parm_al->misalign);
4963 }
4964 }
4965 }
4966 else
4967 streamer_write_uhwi (ob, 0);
4968 }
4969
4970 /* Stream in the aggregate value replacement chain and parameter alignments for NODE from IB. */
4971
4972 static void
4973 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4974 data_in *data_in)
4975 {
4976 struct ipa_agg_replacement_value *aggvals = NULL;
4977 unsigned int count, i;
4978
4979 count = streamer_read_uhwi (ib);
4980 for (i = 0; i < count; i++)
4981 {
4982 struct ipa_agg_replacement_value *av;
4983 struct bitpack_d bp;
4984
4985 av = ggc_alloc<ipa_agg_replacement_value> ();
4986 av->offset = streamer_read_uhwi (ib);
4987 av->index = streamer_read_uhwi (ib);
4988 av->value = stream_read_tree (ib, data_in);
4989 bp = streamer_read_bitpack (ib);
4990 av->by_ref = bp_unpack_value (&bp, 1);
4991 av->next = aggvals;
4992 aggvals = av;
4993 }
4994 ipa_set_node_agg_value_chain (node, aggvals);
4995
4996 count = streamer_read_uhwi (ib);
4997 if (count > 0)
4998 {
4999 ipcp_grow_transformations_if_necessary ();
5000
5001 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5002 vec_safe_grow_cleared (ts->alignments, count);
5003
5004 for (i = 0; i < count; i++)
5005 {
5006 ipa_alignment *parm_al;
5007 parm_al = &(*ts->alignments)[i];
5008 struct bitpack_d bp;
5009 bp = streamer_read_bitpack (ib);
5010 parm_al->known = bp_unpack_value (&bp, 1);
5011 if (parm_al->known)
5012 {
5013 parm_al->align = streamer_read_uhwi (ib);
5014 parm_al->misalign
5015 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5016 0, parm_al->align);
5017 }
5018 }
5019 }
5020 }
5021
5022 /* Write all aggregate replacements for nodes in the current partition. */
5023
5024 void
5025 ipcp_write_transformation_summaries (void)
5026 {
5027 struct cgraph_node *node;
5028 struct output_block *ob;
5029 unsigned int count = 0;
5030 lto_symtab_encoder_iterator lsei;
5031 lto_symtab_encoder_t encoder;
5032
5033 ob = create_output_block (LTO_section_ipcp_transform);
5034 encoder = ob->decl_state->symtab_node_encoder;
5035 ob->symbol = NULL;
5036 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5037 lsei_next_function_in_partition (&lsei))
5038 {
5039 node = lsei_cgraph_node (lsei);
5040 if (node->has_gimple_body_p ())
5041 count++;
5042 }
5043
5044 streamer_write_uhwi (ob, count);
5045
5046 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5047 lsei_next_function_in_partition (&lsei))
5048 {
5049 node = lsei_cgraph_node (lsei);
5050 if (node->has_gimple_body_p ())
5051 write_ipcp_transformation_info (ob, node);
5052 }
5053 streamer_write_char_stream (ob->main_stream, 0);
5054 produce_asm (ob, NULL);
5055 destroy_output_block (ob);
5056 }
5057
5058 /* Read replacements section in file FILE_DATA of length LEN with data
5059 DATA. */
5060
5061 static void
5062 read_replacements_section (struct lto_file_decl_data *file_data,
5063 const char *data,
5064 size_t len)
5065 {
5066 const struct lto_function_header *header =
5067 (const struct lto_function_header *) data;
5068 const int cfg_offset = sizeof (struct lto_function_header);
5069 const int main_offset = cfg_offset + header->cfg_size;
5070 const int string_offset = main_offset + header->main_size;
5071 struct data_in *data_in;
5072 unsigned int i;
5073 unsigned int count;
5074
5075 lto_input_block ib_main ((const char *) data + main_offset,
5076 header->main_size, file_data->mode_table);
5077
5078 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5079 header->string_size, vNULL);
5080 count = streamer_read_uhwi (&ib_main);
5081
5082 for (i = 0; i < count; i++)
5083 {
5084 unsigned int index;
5085 struct cgraph_node *node;
5086 lto_symtab_encoder_t encoder;
5087
5088 index = streamer_read_uhwi (&ib_main);
5089 encoder = file_data->symtab_node_encoder;
5090 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5091 index));
5092 gcc_assert (node->definition);
5093 read_ipcp_transformation_info (&ib_main, node, data_in);
5094 }
5095 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5096 len);
5097 lto_data_in_delete (data_in);
5098 }
5099
5100 /* Read IPA-CP aggregate replacements. */
5101
5102 void
5103 ipcp_read_transformation_summaries (void)
5104 {
5105 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5106 struct lto_file_decl_data *file_data;
5107 unsigned int j = 0;
5108
5109 while ((file_data = file_data_vec[j++]))
5110 {
5111 size_t len;
5112 const char *data = lto_get_section_data (file_data,
5113 LTO_section_ipcp_transform,
5114 NULL, &len);
5115 if (data)
5116 read_replacements_section (file_data, data, len);
5117 }
5118 }
5119
5120 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5121 NODE. */
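/* For example, if NODE skips its original arguments 0 and 2, replacement
   values with indices 1 and 3 are remapped to indices 0 and 1
   respectively.  */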
5122
5123 static void
5124 adjust_agg_replacement_values (struct cgraph_node *node,
5125 struct ipa_agg_replacement_value *aggval)
5126 {
5127 struct ipa_agg_replacement_value *v;
5128 int i, c = 0, d = 0, *adj;
5129
5130 if (!node->clone.combined_args_to_skip)
5131 return;
5132
5133 for (v = aggval; v; v = v->next)
5134 {
5135 gcc_assert (v->index >= 0);
5136 if (c < v->index)
5137 c = v->index;
5138 }
5139 c++;
5140
5141 adj = XALLOCAVEC (int, c);
5142 for (i = 0; i < c; i++)
5143 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5144 {
5145 adj[i] = -1;
5146 d++;
5147 }
5148 else
5149 adj[i] = i - d;
5150
5151 for (v = aggval; v; v = v->next)
5152 v->index = adj[v->index];
5153 }
5154
5155 /* Dominator walker driving the ipcp modification phase. */
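/* For every load from an aggregate backed by a parameter, the walker
   looks for a replacement value with a matching index, offset, size and
   by_ref flag and, when one is found, substitutes the known constant
   for the right-hand side of the load.  */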
5156
5157 class ipcp_modif_dom_walker : public dom_walker
5158 {
5159 public:
5160 ipcp_modif_dom_walker (struct func_body_info *fbi,
5161 vec<ipa_param_descriptor> descs,
5162 struct ipa_agg_replacement_value *av,
5163 bool *sc, bool *cc)
5164 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5165 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5166
5167 virtual void before_dom_children (basic_block);
5168
5169 private:
5170 struct func_body_info *m_fbi;
5171 vec<ipa_param_descriptor> m_descriptors;
5172 struct ipa_agg_replacement_value *m_aggval;
5173 bool *m_something_changed, *m_cfg_changed;
5174 };
5175
5176 void
5177 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5178 {
5179 gimple_stmt_iterator gsi;
5180 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5181 {
5182 struct ipa_agg_replacement_value *v;
5183 gimple stmt = gsi_stmt (gsi);
5184 tree rhs, val, t;
5185 HOST_WIDE_INT offset, size;
5186 int index;
5187 bool by_ref, vce;
5188
5189 if (!gimple_assign_load_p (stmt))
5190 continue;
5191 rhs = gimple_assign_rhs1 (stmt);
5192 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5193 continue;
5194
5195 vce = false;
5196 t = rhs;
5197 while (handled_component_p (t))
5198 {
5199 /* V_C_E can do things like convert an array of integers to one
5200 bigger integer and similar things that we do not handle below. */
5201 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5202 {
5203 vce = true;
5204 break;
5205 }
5206 t = TREE_OPERAND (t, 0);
5207 }
5208 if (vce)
5209 continue;
5210
5211 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5212 &offset, &size, &by_ref))
5213 continue;
5214 for (v = m_aggval; v; v = v->next)
5215 if (v->index == index
5216 && v->offset == offset)
5217 break;
5218 if (!v
5219 || v->by_ref != by_ref
5220 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5221 continue;
5222
5223 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5224 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5225 {
5226 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5227 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5228 else if (TYPE_SIZE (TREE_TYPE (rhs))
5229 == TYPE_SIZE (TREE_TYPE (v->value)))
5230 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5231 else
5232 {
5233 if (dump_file)
5234 {
5235 fprintf (dump_file, " const ");
5236 print_generic_expr (dump_file, v->value, 0);
5237 fprintf (dump_file, " can't be converted to type of ");
5238 print_generic_expr (dump_file, rhs, 0);
5239 fprintf (dump_file, "\n");
5240 }
5241 continue;
5242 }
5243 }
5244 else
5245 val = v->value;
5246
5247 if (dump_file && (dump_flags & TDF_DETAILS))
5248 {
5249 fprintf (dump_file, "Modifying stmt:\n ");
5250 print_gimple_stmt (dump_file, stmt, 0, 0);
5251 }
5252 gimple_assign_set_rhs_from_tree (&gsi, val);
5253 update_stmt (stmt);
5254
5255 if (dump_file && (dump_flags & TDF_DETAILS))
5256 {
5257 fprintf (dump_file, "into:\n ");
5258 print_gimple_stmt (dump_file, stmt, 0, 0);
5259 fprintf (dump_file, "\n");
5260 }
5261
5262 *m_something_changed = true;
5263 if (maybe_clean_eh_stmt (stmt)
5264 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5265 *m_cfg_changed = true;
5266 }
5267
5268 }
5269
5270 /* Update alignment of formal parameters as described in
5271 ipcp_transformation_summary. */
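/* For instance, if propagation proved that a pointer parameter is always
   16-byte aligned with a misalignment of 4, that pair is recorded in the
   ptr_info of the parameter's default definition SSA name (the numbers
   are purely illustrative).  */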
5272
5273 static void
5274 ipcp_update_alignments (struct cgraph_node *node)
5275 {
5276 tree fndecl = node->decl;
5277 tree parm = DECL_ARGUMENTS (fndecl);
5278 tree next_parm = parm;
5279 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5280 if (!ts || vec_safe_length (ts->alignments) == 0)
5281 return;
5282 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5283 unsigned count = alignments.length ();
5284
5285 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5286 {
5287 if (node->clone.combined_args_to_skip
5288 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5289 continue;
5290 gcc_checking_assert (parm);
5291 next_parm = DECL_CHAIN (parm);
5292
5293 if (!alignments[i].known || !is_gimple_reg (parm))
5294 continue;
5295 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5296 if (!ddef)
5297 continue;
5298
5299 if (dump_file)
5300 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5301 "misalignment to %u\n", i, alignments[i].align,
5302 alignments[i].misalign);
5303
5304 struct ptr_info_def *pi = get_ptr_info (ddef);
5305 gcc_checking_assert (pi);
5306 unsigned old_align;
5307 unsigned old_misalign;
5308 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5309
5310 if (old_known
5311 && old_align >= alignments[i].align)
5312 {
5313 if (dump_file)
5314 fprintf (dump_file, " But the alignment was already %u.\n",
5315 old_align);
5316 continue;
5317 }
5318 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5319 }
5320 }
5321
5322 /* IPCP transformation phase doing propagation of aggregate values. */
5323
5324 unsigned int
5325 ipcp_transform_function (struct cgraph_node *node)
5326 {
5327 vec<ipa_param_descriptor> descriptors = vNULL;
5328 struct func_body_info fbi;
5329 struct ipa_agg_replacement_value *aggval;
5330 int param_count;
5331 bool cfg_changed = false, something_changed = false;
5332
5333 gcc_checking_assert (cfun);
5334 gcc_checking_assert (current_function_decl);
5335
5336 if (dump_file)
5337 fprintf (dump_file, "Modification phase of node %s/%i\n",
5338 node->name (), node->order);
5339
5340 ipcp_update_alignments (node);
5341 aggval = ipa_get_agg_replacements_for_node (node);
5342 if (!aggval)
5343 return 0;
5344 param_count = count_formal_params (node->decl);
5345 if (param_count == 0)
5346 return 0;
5347 adjust_agg_replacement_values (node, aggval);
5348 if (dump_file)
5349 ipa_dump_agg_replacement_values (dump_file, aggval);
5350
5351 fbi.node = node;
5352 fbi.info = NULL;
5353 fbi.bb_infos = vNULL;
5354 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5355 fbi.param_count = param_count;
5356 fbi.aa_walked = 0;
5357
5358 descriptors.safe_grow_cleared (param_count);
5359 ipa_populate_param_decls (node, descriptors);
5360 calculate_dominance_info (CDI_DOMINATORS);
5361 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5362 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5363
5364 int i;
5365 struct ipa_bb_info *bi;
5366 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5367 free_ipa_bb_info (bi);
5368 fbi.bb_infos.release ();
5369 free_dominance_info (CDI_DOMINATORS);
5370 (*ipcp_transformations)[node->uid].agg_values = NULL;
5371 (*ipcp_transformations)[node->uid].alignments = NULL;
5372 descriptors.release ();
5373
5374 if (!something_changed)
5375 return 0;
5376 else if (cfg_changed)
5377 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5378 else
5379 return TODO_update_ssa_only_virtuals;
5380 }