/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements walked so far when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
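
/* An editor-added illustration, not from the original sources: when
   ipa_set_jf_constant below records a constant such as &foo, it allocates
   one of these descriptors:

       void wrap (void) { call_me (&foo); }   // rdesc->cs = edge to call_me,
                                              // rdesc->refcount = 1

   Cloning the call graph edge chains a copy via NEXT_DUPLICATE and bumps
   REFCOUNT; once REFCOUNT drops to zero, the IPA reference to foo can be
   removed.  */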

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

/* Return index of the formal whose tree is PTREE in the function whose
   formal parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the I-th formal parameter of the function described by INFO to FILE.
   (There is no setter counterpart; the descriptor array is built just once
   by ipa_initialize_node_params.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
	ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
	      && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
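
/* An editor-added illustration, not from the original sources: in C++ code
   such as

       struct A { virtual void f (); };
       struct B { int i; A a; };

       void caller (void)
       {
         B b;
         callee (&b.a);     // KNOWN TYPE: base_type B, offset of b.a in
       }                    // bits, component_type A

   the argument is known to point into an automatically allocated object of
   type B, so the callee can rely on the dynamic type of the A sub-object.  */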

/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
	  || !TYPE_BINFO (jfunc->value.known_type.component_type)
	  || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
	return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
   */
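
/* An editor-added sketch of the three constructor sections described above
   (illustrative only, not from the original sources):

       struct B : A
       {
         C c;
         B () : A (), c ()    // 1) A::A () runs first,
         {                    // 2) then the VMT pointers of the A-in-B
           user_code ();      //    sub-object and of B itself are stored,
         }                    // 3) only then C::C () and user_code () run.
       };

   Hence, when walking statements backwards from a call in section 3, every
   call seen before reaching the section 2 stores can safely be skipped.  */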

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they do call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
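
/* Editor's illustration of the above (hypothetical example, not from the
   original sources):

       void A::report ()       // an ordinary method: the dynamic type of
       {                       // *this is stable, so for this call
         observe (this);       // param_type_may_change_p returns false
       }

       A::~A ()                // a destructor: the dynamic type of *this
       {                       // degrades as bases are destroyed, so here
         observe (this);       // it returns true
       }

   Passed instances are thus considered type-stable except in cdtors or
   after inlining has run.  */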

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gimple call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
	 is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
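
/* A small editor-added sketch of the caching scheme above (hypothetical
   CFG, not from the original sources):

       BB2   a walk finds a store into the parameter: parm_modified = true
        |
       BB3   no local status yet: parm_bb_aa_status_for_bb copies BB2's
        |
       BB4   inherits the same answer without re-walking the vdef chain

   so each question is answered at most once per dominator-tree path.  */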

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function that we have
   gathered so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   DESCRIPTORS describe the parameters of the current function and FBI the
   context of their analysis (FBI can be NULL), STMT is the load statement.
   If the function returns true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled
   with the parameter index, offset within the aggregate and whether it is a
   load from a value passed by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
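
/* For instance (an editor-added example, not from the original sources),
   given

       int foo (struct S *p)
       {
         return p->fld;    // *INDEX_P = 0 (index of p), *OFFSET_P = bit
       }                   // offset of fld within S, *BY_REF_P = true

   provided that the memory P points to cannot have been modified between
   the beginning of foo and the load.  */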

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   INFO is the structure describing individual parameters at different
   stages of IPA optimizations.  FBI holds the information that is only
   needed during intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
		     && !detect_type_change (op1, base, TREE_TYPE (param_type),
					     call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
				    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
	 we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
	  || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
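
/* Editor's illustration (not from the original sources): the layout matched
   above is the usual representation of a C++ pointer to member function,

       struct
       {
         void (S::*pfn) ();    // pointer to a METHOD_TYPE -> *METHOD_PTR
         ptrdiff_t delta;      // this-pointer adjustment  -> *DELTA
       };

   i.e. a pointer to a method followed by exactly one more field.  */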

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
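
/* For example (an editor-added sketch, not from the original sources), for
   the stores preceding a call

       s.a = 1;          // entry: offset 0,  size 32, constant 1
       s.b = 2;          // entry: offset 32, size 32, constant 2
       s.c = unknown_1;  // entry: offset 64, size 32, constant NULL
       bar (&s);

   the backward scan below builds a list of three such entries sorted by
   offset (the offsets shown assume 32-bit ints and no padding).  */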

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
1686
1687 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1688 in ARG is filled in with constant values. ARG can either be an aggregate
1689 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1690 aggregate. JFUNC is the jump function into which the constants are
1691 subsequently stored. */
1692
1693 static void
1694 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1695 struct ipa_jump_func *jfunc)
1696 {
1697 struct ipa_known_agg_contents_list *list = NULL;
1698 int item_count = 0, const_count = 0;
1699 HOST_WIDE_INT arg_offset, arg_size;
1700 gimple_stmt_iterator gsi;
1701 tree arg_base;
1702 bool check_ref, by_ref;
1703 ao_ref r;
1704
1705 /* The function operates in three stages. First, we prepare check_ref, r,
1706 arg_base and arg_offset based on what is actually passed as an actual
1707 argument. */
1708
1709 if (POINTER_TYPE_P (arg_type))
1710 {
1711 by_ref = true;
1712 if (TREE_CODE (arg) == SSA_NAME)
1713 {
1714 tree type_size;
1715 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1716 return;
1717 check_ref = true;
1718 arg_base = arg;
1719 arg_offset = 0;
1720 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1721 arg_size = tree_to_uhwi (type_size);
1722 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1723 }
1724 else if (TREE_CODE (arg) == ADDR_EXPR)
1725 {
1726 HOST_WIDE_INT arg_max_size;
1727
1728 arg = TREE_OPERAND (arg, 0);
1729 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1730 &arg_max_size);
1731 if (arg_max_size == -1
1732 || arg_max_size != arg_size
1733 || arg_offset < 0)
1734 return;
1735 if (DECL_P (arg_base))
1736 {
1737 check_ref = false;
1738 ao_ref_init (&r, arg_base);
1739 }
1740 else
1741 return;
1742 }
1743 else
1744 return;
1745 }
1746 else
1747 {
1748 HOST_WIDE_INT arg_max_size;
1749
1750 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1751
1752 by_ref = false;
1753 check_ref = false;
1754 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1755 &arg_max_size);
1756 if (arg_max_size == -1
1757 || arg_max_size != arg_size
1758 || arg_offset < 0)
1759 return;
1760
1761 ao_ref_init (&r, arg);
1762 }
1763
1764   /* The second stage walks back through the BB, looks at individual
1765      statements and, as long as it is confident of how the statements affect
1766      the contents of the aggregate, builds a sorted linked list of
1767      ipa_known_agg_contents_list structures describing it.  */
1768 gsi = gsi_for_stmt (call);
1769 gsi_prev (&gsi);
1770 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1771 {
1772 struct ipa_known_agg_contents_list *n, **p;
1773 gimple stmt = gsi_stmt (gsi);
1774 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1775 tree lhs, rhs, lhs_base;
1776
1777 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1778 continue;
1779 if (!gimple_assign_single_p (stmt))
1780 break;
1781
1782 lhs = gimple_assign_lhs (stmt);
1783 rhs = gimple_assign_rhs1 (stmt);
1784 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1785 || TREE_CODE (lhs) == BIT_FIELD_REF
1786 || contains_bitfld_component_ref_p (lhs))
1787 break;
1788
1789 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1790 &lhs_max_size);
1791 if (lhs_max_size == -1
1792 || lhs_max_size != lhs_size)
1793 break;
1794
1795 if (check_ref)
1796 {
1797 if (TREE_CODE (lhs_base) != MEM_REF
1798 || TREE_OPERAND (lhs_base, 0) != arg_base
1799 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1800 break;
1801 }
1802 else if (lhs_base != arg_base)
1803 {
1804 if (DECL_P (lhs_base))
1805 continue;
1806 else
1807 break;
1808 }
1809
1810 bool already_there = false;
1811 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1812 &already_there);
1813 if (!p)
1814 break;
1815 if (already_there)
1816 continue;
1817
1818 rhs = get_ssa_def_if_simple_copy (rhs);
1819 n = XALLOCA (struct ipa_known_agg_contents_list);
1820 n->size = lhs_size;
1821 n->offset = lhs_offset;
1822 if (is_gimple_ip_invariant (rhs))
1823 {
1824 n->constant = rhs;
1825 const_count++;
1826 }
1827 else
1828 n->constant = NULL_TREE;
1829 n->next = *p;
1830 *p = n;
1831
1832 item_count++;
1833 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1834 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1835 break;
1836 }
1837
1838   /* The third stage just goes over the list and creates an appropriate
1839      vector of ipa_agg_jf_item structures out of it, of course only if there
1840      are any known constants to begin with.  */
1841
1842 if (const_count)
1843 {
1844 jfunc->agg.by_ref = by_ref;
1845 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1846 }
1847 }
1848
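/* Return the type of the I-th formal parameter of the callee of edge E, or
   NULL if it cannot be determined.  The type is looked up in TYPE_ARG_TYPES
   of the called function type and, failing that, in DECL_ARGUMENTS of the
   callee declaration (which helps e.g. with K&R style declarations).  */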
1849 static tree
1850 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1851 {
1852 int n;
1853 tree type = (e->callee
1854 ? TREE_TYPE (e->callee->decl)
1855 : gimple_call_fntype (e->call_stmt));
1856 tree t = TYPE_ARG_TYPES (type);
1857
1858 for (n = 0; n < i; n++)
1859 {
1860 if (!t)
1861 break;
1862 t = TREE_CHAIN (t);
1863 }
1864 if (t)
1865 return TREE_VALUE (t);
1866 if (!e->callee)
1867 return NULL;
1868 t = DECL_ARGUMENTS (e->callee->decl);
1869 for (n = 0; n < i; n++)
1870 {
1871 if (!t)
1872 return NULL;
1873 t = TREE_CHAIN (t);
1874 }
1875 if (t)
1876 return TREE_TYPE (t);
1877 return NULL;
1878 }
1879
1880 /* Compute jump function for all arguments of callsite CS and insert the
1881 information in the jump_functions array in the ipa_edge_args corresponding
1882 to this callsite. */
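
/* As an illustration, for a hypothetical call site

     foo (i, 42, i + 1);

   where I is a formal parameter of the caller, the computed jump functions
   would typically be a simple pass-through for argument 0, a constant for
   argument 1 and an arithmetic pass-through (PLUS_EXPR with operand 1) for
   argument 2.  */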
1883
1884 static void
1885 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1886 struct cgraph_edge *cs)
1887 {
1888 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1889 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1890 gimple call = cs->call_stmt;
1891 int n, arg_num = gimple_call_num_args (call);
1892 bool useful_context = false;
1893
1894 if (arg_num == 0 || args->jump_functions)
1895 return;
1896 vec_safe_grow_cleared (args->jump_functions, arg_num);
1897 if (flag_devirtualize)
1898 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1899
1900 if (gimple_call_internal_p (call))
1901 return;
1902 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1903 return;
1904
1905 for (n = 0; n < arg_num; n++)
1906 {
1907 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1908 tree arg = gimple_call_arg (call, n);
1909 tree param_type = ipa_get_callee_param_type (cs, n);
1910 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1911 {
1912 tree instance;
1913 struct ipa_polymorphic_call_context context (cs->caller->decl,
1914 arg, cs->call_stmt,
1915 &instance);
1916 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1917 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1918 if (!context.useless_p ())
1919 useful_context = true;
1920 }
1921
1922 if (is_gimple_ip_invariant (arg))
1923 ipa_set_jf_constant (jfunc, arg, cs);
1924 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1925 && TREE_CODE (arg) == PARM_DECL)
1926 {
1927 int index = ipa_get_param_decl_index (info, arg);
1928
1929 	  gcc_assert (index >= 0);
1930 	  /* Aggregate passed by value; check for a pass-through, otherwise we
1931 	     will attempt to fill in the aggregate contents later in this
1932 	     loop.  */
1933 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1934 {
1935 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1936 continue;
1937 }
1938 }
1939 else if (TREE_CODE (arg) == SSA_NAME)
1940 {
1941 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1942 {
1943 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1944 if (index >= 0)
1945 {
1946 bool agg_p, type_p;
1947 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1948 if (param_type && POINTER_TYPE_P (param_type))
1949 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1950 call, jfunc);
1951 else
1952 type_p = false;
1953 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1954 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1955 type_p);
1956 }
1957 }
1958 else
1959 {
1960 gimple stmt = SSA_NAME_DEF_STMT (arg);
1961 if (is_gimple_assign (stmt))
1962 compute_complex_assign_jump_func (fbi, info, jfunc,
1963 call, stmt, arg, param_type);
1964 else if (gimple_code (stmt) == GIMPLE_PHI)
1965 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1966 call, stmt, param_type);
1967 }
1968 }
1969 else
1970 compute_known_type_jump_func (arg, jfunc, call,
1971 param_type
1972 && POINTER_TYPE_P (param_type)
1973 ? TREE_TYPE (param_type)
1974 : NULL);
1975
1976       /* If ARG is a pointer, we cannot use its type to determine the type of
1977 	 the aggregate passed (type conversions are ignored in gimple).  Usually
1978 	 we can safely get the type from the function declaration, but for K&R
1979 	 prototypes or variadic functions we can try our luck with the type of
1980 	 the pointer passed.  TODO: Since we look for actual initialization of
1981 	 the memory object, we may be better off using the memory stores we find.  */
1982 if (!param_type)
1983 param_type = TREE_TYPE (arg);
1984
1985 if ((jfunc->type != IPA_JF_PASS_THROUGH
1986 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1987 && (jfunc->type != IPA_JF_ANCESTOR
1988 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1989 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1990 || POINTER_TYPE_P (param_type)))
1991 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1992 }
1993 if (!useful_context)
1994 vec_free (args->polymorphic_call_contexts);
1995 }
1996
1997 /* Compute jump functions for all edges - both direct and indirect - outgoing
1998 from BB. */
1999
2000 static void
2001 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
2002 {
2003 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2004 int i;
2005 struct cgraph_edge *cs;
2006
2007 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2008 {
2009 struct cgraph_node *callee = cs->callee;
2010
2011 if (callee)
2012 {
2013 callee->ultimate_alias_target ();
2014 /* We do not need to bother analyzing calls to unknown functions
2015 unless they may become known during lto/whopr. */
2016 if (!callee->definition && !flag_lto)
2017 continue;
2018 }
2019 ipa_compute_jump_functions_for_edge (fbi, cs);
2020 }
2021 }
2022
2023 /* If STMT looks like a statement loading a value from a member pointer formal
2024 parameter, return that parameter and store the offset of the field to
2025 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2026 might be clobbered). If USE_DELTA, then we look for a use of the delta
2027 field rather than the pfn. */
2028
2029 static tree
2030 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2031 HOST_WIDE_INT *offset_p)
2032 {
2033 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2034
2035 if (!gimple_assign_single_p (stmt))
2036 return NULL_TREE;
2037
2038 rhs = gimple_assign_rhs1 (stmt);
2039 if (TREE_CODE (rhs) == COMPONENT_REF)
2040 {
2041 ref_field = TREE_OPERAND (rhs, 1);
2042 rhs = TREE_OPERAND (rhs, 0);
2043 }
2044 else
2045 ref_field = NULL_TREE;
2046 if (TREE_CODE (rhs) != MEM_REF)
2047 return NULL_TREE;
2048 rec = TREE_OPERAND (rhs, 0);
2049 if (TREE_CODE (rec) != ADDR_EXPR)
2050 return NULL_TREE;
2051 rec = TREE_OPERAND (rec, 0);
2052 if (TREE_CODE (rec) != PARM_DECL
2053 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2054 return NULL_TREE;
2055 ref_offset = TREE_OPERAND (rhs, 1);
2056
2057 if (use_delta)
2058 fld = delta_field;
2059 else
2060 fld = ptr_field;
2061 if (offset_p)
2062 *offset_p = int_bit_position (fld);
2063
2064 if (ref_field)
2065 {
2066 if (integer_nonzerop (ref_offset))
2067 return NULL_TREE;
2068 return ref_field == fld ? rec : NULL_TREE;
2069 }
2070 else
2071 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2072 : NULL_TREE;
2073 }
2074
2075 /* Returns true iff T is an SSA_NAME defined by a statement. */
2076
2077 static bool
2078 ipa_is_ssa_with_stmt_def (tree t)
2079 {
2080 if (TREE_CODE (t) == SSA_NAME
2081 && !SSA_NAME_IS_DEFAULT_DEF (t))
2082 return true;
2083 else
2084 return false;
2085 }
2086
2087 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2088 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2089 indirect call graph edge. */
2090
2091 static struct cgraph_edge *
2092 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2093 {
2094 struct cgraph_edge *cs;
2095
2096 cs = node->get_edge (stmt);
2097 cs->indirect_info->param_index = param_index;
2098 cs->indirect_info->agg_contents = 0;
2099 cs->indirect_info->member_ptr = 0;
2100 return cs;
2101 }
2102
2103 /* Analyze the CALL and examine uses of formal parameters of the caller
2104    FBI->node (described by FBI->info).  Currently it checks
2106 whether the call calls a pointer that is a formal parameter and if so, the
2107 parameter is marked with the called flag and an indirect call graph edge
2108 describing the call is created. This is very simple for ordinary pointers
2109 represented in SSA but not-so-nice when it comes to member pointers. The
2110 ugly part of this function does nothing more than trying to match the
2111 pattern of such a call. An example of such a pattern is the gimple dump
2112 below, the call is on the last line:
2113
2114 <bb 2>:
2115 f$__delta_5 = f.__delta;
2116 f$__pfn_24 = f.__pfn;
2117
2118 or
2119 <bb 2>:
2120 f$__delta_5 = MEM[(struct *)&f];
2121 f$__pfn_24 = MEM[(struct *)&f + 4B];
2122
2123 and a few lines below:
2124
2125 <bb 5>
2126 D.2496_3 = (int) f$__pfn_24;
2127 D.2497_4 = D.2496_3 & 1;
2128 if (D.2497_4 != 0)
2129 goto <bb 3>;
2130 else
2131 goto <bb 4>;
2132
2133 <bb 6>:
2134 D.2500_7 = (unsigned int) f$__delta_5;
2135 D.2501_8 = &S + D.2500_7;
2136 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2137 D.2503_10 = *D.2502_9;
2138 D.2504_12 = f$__pfn_24 + -1;
2139 D.2505_13 = (unsigned int) D.2504_12;
2140 D.2506_14 = D.2503_10 + D.2505_13;
2141 D.2507_15 = *D.2506_14;
2142 iftmp.11_16 = (String:: *) D.2507_15;
2143
2144 <bb 7>:
2145 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2146 D.2500_19 = (unsigned int) f$__delta_5;
2147 D.2508_20 = &S + D.2500_19;
2148 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2149
2150 Such patterns are results of simple calls to a member pointer:
2151
2152 int doprinting (int (MyString::* f)(int) const)
2153 {
2154 MyString S ("somestring");
2155
2156 return (S.*f)(4);
2157 }
2158
2159 Moreover, the function also looks for called pointers loaded from aggregates
2160 passed by value or reference. */
2161
2162 static void
2163 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2164 tree target)
2165 {
2166 struct ipa_node_params *info = fbi->info;
2167 HOST_WIDE_INT offset;
2168 bool by_ref;
2169
2170 if (SSA_NAME_IS_DEFAULT_DEF (target))
2171 {
2172 tree var = SSA_NAME_VAR (target);
2173 int index = ipa_get_param_decl_index (info, var);
2174 if (index >= 0)
2175 ipa_note_param_call (fbi->node, index, call);
2176 return;
2177 }
2178
2179 int index;
2180 gimple def = SSA_NAME_DEF_STMT (target);
2181 if (gimple_assign_single_p (def)
2182 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2183 gimple_assign_rhs1 (def), &index, &offset,
2184 NULL, &by_ref))
2185 {
2186 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2187 cs->indirect_info->offset = offset;
2188 cs->indirect_info->agg_contents = 1;
2189 cs->indirect_info->by_ref = by_ref;
2190 return;
2191 }
2192
2193 /* Now we need to try to match the complex pattern of calling a member
2194 pointer. */
2195 if (gimple_code (def) != GIMPLE_PHI
2196 || gimple_phi_num_args (def) != 2
2197 || !POINTER_TYPE_P (TREE_TYPE (target))
2198 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2199 return;
2200
2201 /* First, we need to check whether one of these is a load from a member
2202 pointer that is a parameter to this function. */
2203 tree n1 = PHI_ARG_DEF (def, 0);
2204 tree n2 = PHI_ARG_DEF (def, 1);
2205 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2206 return;
2207 gimple d1 = SSA_NAME_DEF_STMT (n1);
2208 gimple d2 = SSA_NAME_DEF_STMT (n2);
2209
2210 tree rec;
2211 basic_block bb, virt_bb;
2212 basic_block join = gimple_bb (def);
2213 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2214 {
2215 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2216 return;
2217
2218 bb = EDGE_PRED (join, 0)->src;
2219 virt_bb = gimple_bb (d2);
2220 }
2221 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2222 {
2223 bb = EDGE_PRED (join, 1)->src;
2224 virt_bb = gimple_bb (d1);
2225 }
2226 else
2227 return;
2228
2229 /* Second, we need to check that the basic blocks are laid out in the way
2230 corresponding to the pattern. */
2231
2232 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2233 || single_pred (virt_bb) != bb
2234 || single_succ (virt_bb) != join)
2235 return;
2236
2237 /* Third, let's see that the branching is done depending on the least
2238 significant bit of the pfn. */
2239
2240 gimple branch = last_stmt (bb);
2241 if (!branch || gimple_code (branch) != GIMPLE_COND)
2242 return;
2243
2244 if ((gimple_cond_code (branch) != NE_EXPR
2245 && gimple_cond_code (branch) != EQ_EXPR)
2246 || !integer_zerop (gimple_cond_rhs (branch)))
2247 return;
2248
2249 tree cond = gimple_cond_lhs (branch);
2250 if (!ipa_is_ssa_with_stmt_def (cond))
2251 return;
2252
2253 def = SSA_NAME_DEF_STMT (cond);
2254 if (!is_gimple_assign (def)
2255 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2256 || !integer_onep (gimple_assign_rhs2 (def)))
2257 return;
2258
2259 cond = gimple_assign_rhs1 (def);
2260 if (!ipa_is_ssa_with_stmt_def (cond))
2261 return;
2262
2263 def = SSA_NAME_DEF_STMT (cond);
2264
2265 if (is_gimple_assign (def)
2266 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2267 {
2268 cond = gimple_assign_rhs1 (def);
2269 if (!ipa_is_ssa_with_stmt_def (cond))
2270 return;
2271 def = SSA_NAME_DEF_STMT (cond);
2272 }
2273
2274 tree rec2;
2275 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2276 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2277 == ptrmemfunc_vbit_in_delta),
2278 NULL);
2279 if (rec != rec2)
2280 return;
2281
2282 index = ipa_get_param_decl_index (info, rec);
2283 if (index >= 0
2284 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2285 {
2286 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2287 cs->indirect_info->offset = offset;
2288 cs->indirect_info->agg_contents = 1;
2289 cs->indirect_info->member_ptr = 1;
2290 }
2291
2292 return;
2293 }
2294
2295 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2296 object referenced in the expression is a formal parameter of the caller
2297 FBI->node (described by FBI->info), create a call note for the
2298 statement. */
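
/* For example, in a hypothetical C++ caller

     int f (A *a) { return a->foo (); }

   the virtual call is represented by an OBJ_TYPE_REF whose object is the
   default definition SSA name of the parameter A, so an indirect call note
   with that parameter's index and the OBJ_TYPE_REF token is created.  */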
2299
2300 static void
2301 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2302 gimple call, tree target)
2303 {
2304 tree obj = OBJ_TYPE_REF_OBJECT (target);
2305 int index;
2306 HOST_WIDE_INT anc_offset;
2307
2308 if (!flag_devirtualize)
2309 return;
2310
2311 if (TREE_CODE (obj) != SSA_NAME)
2312 return;
2313
2314 struct ipa_node_params *info = fbi->info;
2315 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2316 {
2317 struct ipa_jump_func jfunc;
2318 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2319 return;
2320
2321 anc_offset = 0;
2322 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2323 gcc_assert (index >= 0);
2324 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2325 call, &jfunc))
2326 return;
2327 }
2328 else
2329 {
2330 struct ipa_jump_func jfunc;
2331 gimple stmt = SSA_NAME_DEF_STMT (obj);
2332 tree expr;
2333
2334 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2335 if (!expr)
2336 return;
2337 index = ipa_get_param_decl_index (info,
2338 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2339 gcc_assert (index >= 0);
2340 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2341 call, &jfunc, anc_offset))
2342 return;
2343 }
2344
2345 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2346 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2347 ii->offset = anc_offset;
2348 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2349 ii->otr_type = obj_type_ref_class (target);
2350 ii->polymorphic = 1;
2351 }
2352
2353 /* Analyze a call statement CALL to determine whether and how it utilizes
2354    formal parameters of the caller, described by FBI.  */
2356
2357 static void
2358 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2359 {
2360 tree target = gimple_call_fn (call);
2361
2362 if (!target
2363 || (TREE_CODE (target) != SSA_NAME
2364 && !virtual_method_call_p (target)))
2365 return;
2366
2367 struct cgraph_edge *cs = fbi->node->get_edge (call);
2368 /* If we previously turned the call into a direct call, there is
2369 no need to analyze. */
2370 if (cs && !cs->indirect_unknown_callee)
2371 return;
2372
2373 if (cs->indirect_info->polymorphic)
2374 {
2375 tree instance;
2376 tree target = gimple_call_fn (call);
2377 ipa_polymorphic_call_context context (current_function_decl,
2378 target, call, &instance);
2379
2380 gcc_checking_assert (cs->indirect_info->otr_type
2381 == obj_type_ref_class (target));
2382 gcc_checking_assert (cs->indirect_info->otr_token
2383 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2384
2385 cs->indirect_info->vptr_changed
2386 = !context.get_dynamic_type (instance,
2387 OBJ_TYPE_REF_OBJECT (target),
2388 obj_type_ref_class (target), call);
2389 cs->indirect_info->context = context;
2390 }
2391
2392 if (TREE_CODE (target) == SSA_NAME)
2393 ipa_analyze_indirect_call_uses (fbi, call, target);
2394 else if (virtual_method_call_p (target))
2395 ipa_analyze_virtual_call_uses (fbi, call, target);
2396 }
2397
2398
2399 /* Analyze the call statement STMT with respect to formal parameters
2400    (described in FBI->info) of the caller given by FBI->node.  Currently it
2401    only checks whether formal parameters are called.  */
2402
2403 static void
2404 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2405 {
2406 if (is_gimple_call (stmt))
2407 ipa_analyze_call_uses (fbi, stmt);
2408 }
2409
2410 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2411    address operands alike.  If OP is a parameter declaration, mark it as
2412    used in the info structure passed in DATA.  */
2413
2414 static bool
2415 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2416 {
2417 struct ipa_node_params *info = (struct ipa_node_params *) data;
2418
2419 op = get_base_address (op);
2420 if (op
2421 && TREE_CODE (op) == PARM_DECL)
2422 {
2423 int index = ipa_get_param_decl_index (info, op);
2424 gcc_assert (index >= 0);
2425 ipa_set_param_used (info, index, true);
2426 }
2427
2428 return false;
2429 }
2430
2431 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2432 the findings in various structures of the associated ipa_node_params
2433 structure, such as parameter flags, notes etc. FBI holds various data about
2434 the function being analyzed. */
2435
2436 static void
2437 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2438 {
2439 gimple_stmt_iterator gsi;
2440 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2441 {
2442 gimple stmt = gsi_stmt (gsi);
2443
2444 if (is_gimple_debug (stmt))
2445 continue;
2446
2447 ipa_analyze_stmt_uses (fbi, stmt);
2448 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2449 visit_ref_for_mod_analysis,
2450 visit_ref_for_mod_analysis,
2451 visit_ref_for_mod_analysis);
2452 }
2453 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2454 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2455 visit_ref_for_mod_analysis,
2456 visit_ref_for_mod_analysis,
2457 visit_ref_for_mod_analysis);
2458 }
2459
2460 /* Calculate controlled uses of parameters of NODE. */
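
/* For instance, in the hypothetical function

     void f (void (*cb) (void)) { cb (); helper (cb); }

   both uses of CB are parts of call statements, so its controlled uses
   count is two; storing CB into memory or comparing it would instead force
   IPA_UNDESCRIBED_USE.  */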
2461
2462 static void
2463 ipa_analyze_controlled_uses (struct cgraph_node *node)
2464 {
2465 struct ipa_node_params *info = IPA_NODE_REF (node);
2466
2467 for (int i = 0; i < ipa_get_param_count (info); i++)
2468 {
2469 tree parm = ipa_get_param (info, i);
2470 int controlled_uses = 0;
2471
2472 /* For SSA regs see if parameter is used. For non-SSA we compute
2473 the flag during modification analysis. */
2474 if (is_gimple_reg (parm))
2475 {
2476 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2477 parm);
2478 if (ddef && !has_zero_uses (ddef))
2479 {
2480 imm_use_iterator imm_iter;
2481 use_operand_p use_p;
2482
2483 ipa_set_param_used (info, i, true);
2484 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2485 if (!is_gimple_call (USE_STMT (use_p)))
2486 {
2487 if (!is_gimple_debug (USE_STMT (use_p)))
2488 {
2489 controlled_uses = IPA_UNDESCRIBED_USE;
2490 break;
2491 }
2492 }
2493 else
2494 controlled_uses++;
2495 }
2496 else
2497 controlled_uses = 0;
2498 }
2499 else
2500 controlled_uses = IPA_UNDESCRIBED_USE;
2501 ipa_set_controlled_uses (info, i, controlled_uses);
2502 }
2503 }
2504
2505 /* Free stuff in BI. */
2506
2507 static void
2508 free_ipa_bb_info (struct ipa_bb_info *bi)
2509 {
2510 bi->cg_edges.release ();
2511 bi->param_aa_statuses.release ();
2512 }
2513
2514 /* Dominator walker driving the analysis. */
2515
2516 class analysis_dom_walker : public dom_walker
2517 {
2518 public:
2519 analysis_dom_walker (struct func_body_info *fbi)
2520 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2521
2522 virtual void before_dom_children (basic_block);
2523
2524 private:
2525 struct func_body_info *m_fbi;
2526 };
2527
2528 void
2529 analysis_dom_walker::before_dom_children (basic_block bb)
2530 {
2531 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2532 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2533 }
2534
2535 /* Initialize the array describing properties of formal parameters
2536 of NODE, analyze their uses and compute jump functions associated
2537 with actual arguments of calls from within NODE. */
2538
2539 void
2540 ipa_analyze_node (struct cgraph_node *node)
2541 {
2542 struct func_body_info fbi;
2543 struct ipa_node_params *info;
2544
2545 ipa_check_create_node_params ();
2546 ipa_check_create_edge_args ();
2547 info = IPA_NODE_REF (node);
2548
2549 if (info->analysis_done)
2550 return;
2551 info->analysis_done = 1;
2552
2553 if (ipa_func_spec_opts_forbid_analysis_p (node))
2554 {
2555 for (int i = 0; i < ipa_get_param_count (info); i++)
2556 {
2557 ipa_set_param_used (info, i, true);
2558 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2559 }
2560 return;
2561 }
2562
2563 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2564 push_cfun (func);
2565 calculate_dominance_info (CDI_DOMINATORS);
2566 ipa_initialize_node_params (node);
2567 ipa_analyze_controlled_uses (node);
2568
2569 fbi.node = node;
2570 fbi.info = IPA_NODE_REF (node);
2571 fbi.bb_infos = vNULL;
2572 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2573 fbi.param_count = ipa_get_param_count (info);
2574 fbi.aa_walked = 0;
2575
2576 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2577 {
2578 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2579 bi->cg_edges.safe_push (cs);
2580 }
2581
2582 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2583 {
2584 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2585 bi->cg_edges.safe_push (cs);
2586 }
2587
2588 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2589
2590 int i;
2591 struct ipa_bb_info *bi;
2592 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2593 free_ipa_bb_info (bi);
2594 fbi.bb_infos.release ();
2595 free_dominance_info (CDI_DOMINATORS);
2596 pop_cfun ();
2597 }
2598
2599 /* Update the jump function DST when the call graph edge corresponding to
2600    SRC is being inlined, knowing that DST is of type ancestor and SRC of
2601    known type.  */
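
/* For instance, with hypothetical types and offsets: combining a known-type
   jump function saying "object of base type D at offset 0" with an ancestor
   jump function that adds offset 64 and converts to type B yields a
   known-type jump function describing an object of base type D at offset 64
   with component type B, provided the ancestor preserves the type.  */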
2602
2603 static void
2604 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2605 struct ipa_jump_func *dst)
2606 {
2607 HOST_WIDE_INT combined_offset;
2608 tree combined_type;
2609
2610 if (!ipa_get_jf_ancestor_type_preserved (dst))
2611 {
2612 dst->type = IPA_JF_UNKNOWN;
2613 return;
2614 }
2615
2616 combined_offset = ipa_get_jf_known_type_offset (src)
2617 + ipa_get_jf_ancestor_offset (dst);
2618 combined_type = ipa_get_jf_ancestor_type (dst);
2619
2620 ipa_set_jf_known_type (dst, combined_offset,
2621 ipa_get_jf_known_type_base_type (src),
2622 combined_type);
2623 }
2624
2625 /* Update the jump functions associated with call graph edge E when the call
2626 graph edge CS is being inlined, assuming that E->caller is already (possibly
2627 indirectly) inlined into CS->callee and that E has not been inlined. */
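
/* A typical case, with hypothetical indices: if the jump function of an
   argument of E is a simple pass-through of E->caller's parameter 0, and CS
   passed its caller's parameter 3 unchanged as argument 0, then after
   inlining the updated jump function becomes a pass-through of the new
   root's parameter 3.  */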
2628
2629 static void
2630 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2631 struct cgraph_edge *e)
2632 {
2633 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2634 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2635 int count = ipa_get_cs_argument_count (args);
2636 int i;
2637
2638 for (i = 0; i < count; i++)
2639 {
2640 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2641 struct ipa_polymorphic_call_context *dst_ctx
2642 = ipa_get_ith_polymorhic_call_context (args, i);
2643
2644 if (dst->type == IPA_JF_ANCESTOR)
2645 {
2646 struct ipa_jump_func *src;
2647 int dst_fid = dst->value.ancestor.formal_id;
2648 struct ipa_polymorphic_call_context *src_ctx
2649 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2650
2651 	  /* A variable number of arguments can cause havoc if we try to access
2652 	     an argument that does not exist in the inlined edge.  So make sure
2653 	     we don't.  */
2654 if (dst_fid >= ipa_get_cs_argument_count (top))
2655 {
2656 dst->type = IPA_JF_UNKNOWN;
2657 continue;
2658 }
2659
2660 src = ipa_get_ith_jump_func (top, dst_fid);
2661
2662 if (src_ctx && !src_ctx->useless_p ())
2663 {
2664 struct ipa_polymorphic_call_context ctx = *src_ctx;
2665
2666 /* TODO: Make type preserved safe WRT contexts. */
2667 if (!dst->value.ancestor.agg_preserved)
2668 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2669 ctx.offset_by (dst->value.ancestor.offset);
2670 	      if (!ctx.useless_p ())
2671 		{
2672 		  vec_safe_grow_cleared (args->polymorphic_call_contexts,
2673 					 count);
2674 		  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		  /* Combine the context derived from the inlined edge with
		     whatever was already known at this position.  */
		  dst_ctx->combine_with (ctx);
2675 		}
2676 }
2677
2678 if (src->agg.items
2679 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2680 {
2681 struct ipa_agg_jf_item *item;
2682 int j;
2683
2684 	      /* Currently we do not produce clobber aggregate jump functions;
2685 		 replace this with merging when we do.  */
2686 gcc_assert (!dst->agg.items);
2687
2688 dst->agg.items = vec_safe_copy (src->agg.items);
2689 dst->agg.by_ref = src->agg.by_ref;
2690 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2691 item->offset -= dst->value.ancestor.offset;
2692 }
2693
2694 if (src->type == IPA_JF_KNOWN_TYPE)
2695 combine_known_type_and_ancestor_jfs (src, dst);
2696 else if (src->type == IPA_JF_PASS_THROUGH
2697 && src->value.pass_through.operation == NOP_EXPR)
2698 {
2699 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2700 dst->value.ancestor.agg_preserved &=
2701 src->value.pass_through.agg_preserved;
2702 dst->value.ancestor.type_preserved &=
2703 src->value.pass_through.type_preserved;
2704 }
2705 else if (src->type == IPA_JF_ANCESTOR)
2706 {
2707 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2708 dst->value.ancestor.offset += src->value.ancestor.offset;
2709 dst->value.ancestor.agg_preserved &=
2710 src->value.ancestor.agg_preserved;
2711 dst->value.ancestor.type_preserved &=
2712 src->value.ancestor.type_preserved;
2713 }
2714 else
2715 dst->type = IPA_JF_UNKNOWN;
2716 }
2717 else if (dst->type == IPA_JF_PASS_THROUGH)
2718 {
2719 struct ipa_jump_func *src;
2720 	  /* We must check the range due to calls with a variable number of
2721 	     arguments, and we cannot combine jump functions with operations.  */
2722 if (dst->value.pass_through.operation == NOP_EXPR
2723 && (dst->value.pass_through.formal_id
2724 < ipa_get_cs_argument_count (top)))
2725 {
2726 int dst_fid = dst->value.pass_through.formal_id;
2727 src = ipa_get_ith_jump_func (top, dst_fid);
2728 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2729 struct ipa_polymorphic_call_context *src_ctx
2730 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2731
2732 if (src_ctx && !src_ctx->useless_p ())
2733 {
2734 struct ipa_polymorphic_call_context ctx = *src_ctx;
2735
2736 /* TODO: Make type preserved safe WRT contexts. */
2737 		  if (!dst_agg_p)
2738 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2739 if (!ctx.useless_p ())
2740 {
2741 if (!dst_ctx)
2742 {
2743 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2744 count);
2745 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2746 }
2747 dst_ctx->combine_with (ctx);
2748 }
2749 }
2750 switch (src->type)
2751 {
2752 case IPA_JF_UNKNOWN:
2753 dst->type = IPA_JF_UNKNOWN;
2754 break;
2755 case IPA_JF_KNOWN_TYPE:
2756 if (ipa_get_jf_pass_through_type_preserved (dst))
2757 ipa_set_jf_known_type (dst,
2758 ipa_get_jf_known_type_offset (src),
2759 ipa_get_jf_known_type_base_type (src),
2760 ipa_get_jf_known_type_component_type (src));
2761 else
2762 dst->type = IPA_JF_UNKNOWN;
2763 break;
2764 case IPA_JF_CONST:
2765 ipa_set_jf_cst_copy (dst, src);
2766 break;
2767
2768 case IPA_JF_PASS_THROUGH:
2769 {
2770 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2771 enum tree_code operation;
2772 operation = ipa_get_jf_pass_through_operation (src);
2773
2774 if (operation == NOP_EXPR)
2775 {
2776 bool agg_p, type_p;
2777 agg_p = dst_agg_p
2778 && ipa_get_jf_pass_through_agg_preserved (src);
2779 type_p = ipa_get_jf_pass_through_type_preserved (src)
2780 && ipa_get_jf_pass_through_type_preserved (dst);
2781 ipa_set_jf_simple_pass_through (dst, formal_id,
2782 agg_p, type_p);
2783 }
2784 else
2785 {
2786 tree operand = ipa_get_jf_pass_through_operand (src);
2787 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2788 operation);
2789 }
2790 break;
2791 }
2792 case IPA_JF_ANCESTOR:
2793 {
2794 bool agg_p, type_p;
2795 agg_p = dst_agg_p
2796 && ipa_get_jf_ancestor_agg_preserved (src);
2797 type_p = ipa_get_jf_ancestor_type_preserved (src)
2798 && ipa_get_jf_pass_through_type_preserved (dst);
2799 ipa_set_ancestor_jf (dst,
2800 ipa_get_jf_ancestor_offset (src),
2801 ipa_get_jf_ancestor_type (src),
2802 ipa_get_jf_ancestor_formal_id (src),
2803 agg_p, type_p);
2804 break;
2805 }
2806 default:
2807 gcc_unreachable ();
2808 }
2809
2810 if (src->agg.items
2811 && (dst_agg_p || !src->agg.by_ref))
2812 {
2813 		  /* Currently we do not produce clobber aggregate jump
2814 		     functions; replace this with merging when we do.  */
2815 gcc_assert (!dst->agg.items);
2816
2817 dst->agg.by_ref = src->agg.by_ref;
2818 dst->agg.items = vec_safe_copy (src->agg.items);
2819 }
2820 }
2821 else
2822 dst->type = IPA_JF_UNKNOWN;
2823 }
2824 }
2825 }
2826
2827 /* If TARGET is an addr_expr of a function declaration, make it the
2828    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2829 Otherwise, return NULL. */
2830
2831 struct cgraph_edge *
2832 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2833 bool speculative)
2834 {
2835 struct cgraph_node *callee;
2836 struct inline_edge_summary *es = inline_edge_summary (ie);
2837 bool unreachable = false;
2838
2839 if (TREE_CODE (target) == ADDR_EXPR)
2840 target = TREE_OPERAND (target, 0);
2841 if (TREE_CODE (target) != FUNCTION_DECL)
2842 {
2843 target = canonicalize_constructor_val (target, NULL);
2844 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2845 {
2846 if (ie->indirect_info->member_ptr)
2847 /* Member pointer call that goes through a VMT lookup. */
2848 return NULL;
2849
2850 if (dump_enabled_p ())
2851 {
2852 location_t loc = gimple_location_safe (ie->call_stmt);
2853 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2854 "discovered direct call to non-function in %s/%i, "
2855 "making it __builtin_unreachable\n",
2856 ie->caller->name (), ie->caller->order);
2857 }
2858
2859 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2860 callee = cgraph_node::get_create (target);
2861 unreachable = true;
2862 }
2863 else
2864 callee = cgraph_node::get (target);
2865 }
2866 else
2867 callee = cgraph_node::get (target);
2868
2869   /* Because may-edges are not explicitly represented and the vtable may be
2870      external, we may create the first reference to the object in the unit.  */
2871 if (!callee || callee->global.inlined_to)
2872 {
2873
2874       /* We had better make sure that we can refer to it.
2875 In the case of static functions we are out of luck, since we already
2876 removed its body. In the case of public functions we may or may
2877 not introduce the reference. */
2878 if (!canonicalize_constructor_val (target, NULL)
2879 || !TREE_PUBLIC (target))
2880 {
2881 if (dump_file)
2882 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2883 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2884 xstrdup (ie->caller->name ()),
2885 ie->caller->order,
2886 xstrdup (ie->callee->name ()),
2887 ie->callee->order);
2888 return NULL;
2889 }
2890 callee = cgraph_node::get_create (target);
2891 }
2892
2893   /* If the edge is already speculative, check that the targets agree.  */
2894 if (speculative && ie->speculative)
2895 {
2896 struct cgraph_edge *e2;
2897 struct ipa_ref *ref;
2898 ie->speculative_call_info (e2, ie, ref);
2899 if (e2->callee->ultimate_alias_target ()
2900 != callee->ultimate_alias_target ())
2901 {
2902 if (dump_file)
2903 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2904 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2905 xstrdup (ie->caller->name ()),
2906 ie->caller->order,
2907 xstrdup (callee->name ()),
2908 callee->order,
2909 xstrdup (e2->callee->name ()),
2910 e2->callee->order);
2911 }
2912 else
2913 {
2914 if (dump_file)
2915 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2916 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2917 xstrdup (ie->caller->name ()),
2918 ie->caller->order,
2919 xstrdup (callee->name ()),
2920 callee->order);
2921 }
2922 return NULL;
2923 }
2924
2925 if (!dbg_cnt (devirt))
2926 return NULL;
2927
2928 ipa_check_create_node_params ();
2929
2930   /* We cannot make edges to inline clones.  It is a bug if someone removed
2931      the cgraph node too early.  */
2932 gcc_assert (!callee->global.inlined_to);
2933
2934 if (dump_file && !unreachable)
2935 {
2936 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2937 "(%s/%i -> %s/%i), for stmt ",
2938 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2939 speculative ? "speculative" : "known",
2940 xstrdup (ie->caller->name ()),
2941 ie->caller->order,
2942 xstrdup (callee->name ()),
2943 callee->order);
2944 if (ie->call_stmt)
2945 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2946 else
2947 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2948 }
2949 if (dump_enabled_p ())
2950 {
2951 location_t loc = gimple_location_safe (ie->call_stmt);
2952
2953 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2954 "converting indirect call in %s to direct call to %s\n",
2955 ie->caller->name (), callee->name ());
2956 }
2957 if (!speculative)
2958 ie = ie->make_direct (callee);
2959 else
2960 {
2961 if (!callee->can_be_discarded_p ())
2962 {
2963 cgraph_node *alias;
2964 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2965 if (alias)
2966 callee = alias;
2967 }
2968 ie = ie->make_speculative
2969 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2970 }
2971 es = inline_edge_summary (ie);
2972 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2973 - eni_size_weights.call_cost);
2974 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2975 - eni_time_weights.call_cost);
2976
2977 return ie;
2978 }
2979
2980 /* Retrieve a value from aggregate jump function AGG for the given OFFSET,
2981    or return NULL if there is none.  BY_REF specifies whether the value has to
2982 be passed by reference or by value. */
2983
2984 tree
2985 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2986 HOST_WIDE_INT offset, bool by_ref)
2987 {
2988 struct ipa_agg_jf_item *item;
2989 int i;
2990
2991 if (by_ref != agg->by_ref)
2992 return NULL;
2993
2994 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2995 if (item->offset == offset)
2996 {
2997 	/* Currently we do not have clobber values; return NULL for them once
2998 	   we do.  */
2999 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3000 return item->value;
3001 }
3002 return NULL;
3003 }
3004
3005 /* Remove a reference to SYMBOL from the list of references of a node given by
3006 reference description RDESC. Return true if the reference has been
3007 successfully found and removed. */
3008
3009 static bool
3010 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3011 {
3012 struct ipa_ref *to_del;
3013 struct cgraph_edge *origin;
3014
3015 origin = rdesc->cs;
3016 if (!origin)
3017 return false;
3018 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3019 origin->lto_stmt_uid);
3020 if (!to_del)
3021 return false;
3022
3023 to_del->remove_reference ();
3024 if (dump_file)
3025 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3026 xstrdup (origin->caller->name ()),
3027 origin->caller->order, xstrdup (symbol->name ()));
3028 return true;
3029 }
3030
3031 /* If JFUNC has a reference description with refcount different from
3032 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3033 NULL. JFUNC must be a constant jump function. */
3034
3035 static struct ipa_cst_ref_desc *
3036 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3037 {
3038 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3039 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3040 return rdesc;
3041 else
3042 return NULL;
3043 }
3044
3045 /* If the value of constant jump function JFUNC is an address of a function
3046 declaration, return the associated call graph node. Otherwise return
3047 NULL. */
3048
3049 static cgraph_node *
3050 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3051 {
3052 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3053 tree cst = ipa_get_jf_constant (jfunc);
3054 if (TREE_CODE (cst) != ADDR_EXPR
3055 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3056 return NULL;
3057
3058 return cgraph_node::get (TREE_OPERAND (cst, 0));
3059 }
3060
3061
3062 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3063 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3064 the edge specified in the rdesc. Return false if either the symbol or the
3065 reference could not be found, otherwise return true. */
3066
3067 static bool
3068 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3069 {
3070 struct ipa_cst_ref_desc *rdesc;
3071 if (jfunc->type == IPA_JF_CONST
3072 && (rdesc = jfunc_rdesc_usable (jfunc))
3073 && --rdesc->refcount == 0)
3074 {
3075 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3076 if (!symbol)
3077 return false;
3078
3079 return remove_described_reference (symbol, rdesc);
3080 }
3081 return true;
3082 }
3083
3084 /* Try to find a destination for indirect edge IE that corresponds to a simple
3085 call or a call of a member function pointer and where the destination is a
3086 pointer formal parameter described by jump function JFUNC. If it can be
3087 determined, return the newly direct edge, otherwise return NULL.
3088 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3089
3090 static struct cgraph_edge *
3091 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3092 struct ipa_jump_func *jfunc,
3093 struct ipa_node_params *new_root_info)
3094 {
3095 struct cgraph_edge *cs;
3096 tree target;
3097 bool agg_contents = ie->indirect_info->agg_contents;
3098
3099 if (ie->indirect_info->agg_contents)
3100 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3101 ie->indirect_info->offset,
3102 ie->indirect_info->by_ref);
3103 else
3104 target = ipa_value_from_jfunc (new_root_info, jfunc);
3105 if (!target)
3106 return NULL;
3107 cs = ipa_make_edge_direct_to_target (ie, target);
3108
3109 if (cs && !agg_contents)
3110 {
3111 bool ok;
3112 gcc_checking_assert (cs->callee
3113 && (cs != ie
3114 || jfunc->type != IPA_JF_CONST
3115 || !cgraph_node_for_jfunc (jfunc)
3116 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3117 ok = try_decrement_rdesc_refcount (jfunc);
3118 gcc_checking_assert (ok);
3119 }
3120
3121 return cs;
3122 }
3123
3124 /* Return the target to be used in cases of impossible devirtualization. IE
3125 and target (the latter can be NULL) are dumped when dumping is enabled. */
3126
3127 tree
3128 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3129 {
3130 if (dump_file)
3131 {
3132 if (target)
3133 fprintf (dump_file,
3134 "Type inconsistent devirtualization: %s/%i->%s\n",
3135 ie->caller->name (), ie->caller->order,
3136 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3137 else
3138 fprintf (dump_file,
3139 "No devirtualization target in %s/%i\n",
3140 ie->caller->name (), ie->caller->order);
3141 }
3142 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3143 cgraph_node::get_create (new_target);
3144 return new_target;
3145 }
3146
3147 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3148 call based on a formal parameter which is described by jump function JFUNC
3149 and if it can be determined, make it direct and return the direct edge.
3150 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3151 are relative to. */
3152
3153 static struct cgraph_edge *
3154 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3155 struct ipa_jump_func *jfunc,
3156 struct ipa_node_params *new_root_info,
3157 struct ipa_polymorphic_call_context *ctx_ptr)
3158 {
3159 tree binfo, target = NULL;
3160 bool speculative = false;
3161 bool updated = false;
3162
3163 if (!flag_devirtualize)
3164 return NULL;
3165
3166   /* If this is a call via a function parameter, restrict its type
3167      based on knowledge of the context.  */
3168 if (ctx_ptr && !ie->indirect_info->by_ref)
3169 {
3170 struct ipa_polymorphic_call_context ctx = *ctx_ptr;
3171
3172 ctx.offset_by (ie->indirect_info->offset);
3173
3174 if (ie->indirect_info->vptr_changed)
3175 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3176 ie->indirect_info->otr_type);
3177
3178 updated = ie->indirect_info->context.combine_with
3179 (ctx, ie->indirect_info->otr_type);
3180 }
3181
3182 /* Try to do lookup via known virtual table pointer value. */
3183 if (!ie->indirect_info->by_ref
3184 && (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively))
3185 {
3186 tree vtable;
3187 unsigned HOST_WIDE_INT offset;
3188 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3189 ie->indirect_info->offset,
3190 true);
3191 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3192 {
3193 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3194 vtable, offset);
3195 if (t)
3196 {
3197 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3198 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3199 || !possible_polymorphic_call_target_p
3200 (ie, cgraph_node::get (t)))
3201 {
3202 	      /* Do not speculate builtin_unreachable; it is stupid!  */
3203 if (!ie->indirect_info->vptr_changed)
3204 target = ipa_impossible_devirt_target (ie, target);
3205 }
3206 else
3207 {
3208 target = t;
3209 speculative = ie->indirect_info->vptr_changed;
3210 }
3211 }
3212 }
3213 }
3214
3215 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3216
3217 if (binfo && TREE_CODE (binfo) != TREE_BINFO)
3218 {
3219 struct ipa_polymorphic_call_context ctx (binfo,
3220 ie->indirect_info->otr_type,
3221 ie->indirect_info->offset);
3222 updated |= ie->indirect_info->context.combine_with
3223 (ctx, ie->indirect_info->otr_type);
3224 }
3225
3226 if (updated)
3227 {
3228 ipa_polymorphic_call_context context (ie);
3229 vec <cgraph_node *>targets;
3230 bool final;
3231
3232 targets = possible_polymorphic_call_targets
3233 (ie->indirect_info->otr_type,
3234 ie->indirect_info->otr_token,
3235 context, &final);
3236 if (final && targets.length () <= 1)
3237 {
3238 if (targets.length () == 1)
3239 target = targets[0]->decl;
3240 else
3241 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3242 }
3243 else if (!target && flag_devirtualize_speculatively
3244 && !ie->speculative && ie->maybe_hot_p ())
3245 {
3246 cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3247 ie->indirect_info->otr_token,
3248 ie->indirect_info->context);
3249 if (n)
3250 {
3251 target = n->decl;
3252 speculative = true;
3253 }
3254 }
3255 }
3256
3257 if (binfo && TREE_CODE (binfo) == TREE_BINFO)
3258 {
3259 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3260 ie->indirect_info->otr_type);
3261 if (binfo)
3262 {
3263 tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3264 binfo);
3265 if (t)
3266 {
3267 target = t;
3268 speculative = false;
3269 }
3270 }
3271 }
3272
3273 if (target)
3274 {
3275 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
3276 {
3277 if (speculative)
3278 return NULL;
3279 target = ipa_impossible_devirt_target (ie, target);
3280 }
3281 return ipa_make_edge_direct_to_target (ie, target, speculative);
3282 }
3283 else
3284 return NULL;
3285 }
3286
3287 /* Update the param called notes associated with NODE when CS is being inlined,
3288 assuming NODE is (potentially indirectly) inlined into CS->callee.
3289 Moreover, if the callee is discovered to be constant, create a new cgraph
3290 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3291    unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
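
/* For example (hypothetical): if NODE contains an indirect call through its
   parameter with index 2 and the jump function computed for the
   corresponding argument of CS is the constant &foo, the indirect edge can
   be turned into a direct call to foo.  */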
3292
3293 static bool
3294 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3295 struct cgraph_node *node,
3296 vec<cgraph_edge *> *new_edges)
3297 {
3298 struct ipa_edge_args *top;
3299 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3300 struct ipa_node_params *new_root_info;
3301 bool res = false;
3302
3303 ipa_check_create_edge_args ();
3304 top = IPA_EDGE_REF (cs);
3305 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3306 ? cs->caller->global.inlined_to
3307 : cs->caller);
3308
3309 for (ie = node->indirect_calls; ie; ie = next_ie)
3310 {
3311 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3312 struct ipa_jump_func *jfunc;
3313 int param_index;
3314
3315 next_ie = ie->next_callee;
3316
3317 if (ici->param_index == -1)
3318 continue;
3319
3320       /* We must check the range due to calls with a variable number of arguments.  */
3321 if (ici->param_index >= ipa_get_cs_argument_count (top))
3322 {
3323 ici->param_index = -1;
3324 continue;
3325 }
3326
3327 param_index = ici->param_index;
3328 jfunc = ipa_get_ith_jump_func (top, param_index);
3329
3330 if (!flag_indirect_inlining)
3331 new_direct_edge = NULL;
3332 else if (ici->polymorphic)
3333 {
3334 ipa_polymorphic_call_context *ctx;
3335 ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
3336 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3337 new_root_info,
3338 ctx);
3339 }
3340 else
3341 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3342 new_root_info);
3343 /* If speculation was removed, then we need to do nothing. */
3344 if (new_direct_edge && new_direct_edge != ie)
3345 {
3346 new_direct_edge->indirect_inlining_edge = 1;
3347 top = IPA_EDGE_REF (cs);
3348 res = true;
3349 }
3350 else if (new_direct_edge)
3351 {
3352 new_direct_edge->indirect_inlining_edge = 1;
3353 if (new_direct_edge->call_stmt)
3354 new_direct_edge->call_stmt_cannot_inline_p
3355 = !gimple_check_call_matching_types (
3356 new_direct_edge->call_stmt,
3357 new_direct_edge->callee->decl, false);
3358 if (new_edges)
3359 {
3360 new_edges->safe_push (new_direct_edge);
3361 res = true;
3362 }
3363 top = IPA_EDGE_REF (cs);
3364 }
3365 else if (jfunc->type == IPA_JF_PASS_THROUGH
3366 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3367 {
3368 if ((ici->agg_contents
3369 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3370 || (ici->polymorphic
3371 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3372 ici->param_index = -1;
3373 else
3374 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3375 }
3376 else if (jfunc->type == IPA_JF_ANCESTOR)
3377 {
3378 if ((ici->agg_contents
3379 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3380 || (ici->polymorphic
3381 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3382 ici->param_index = -1;
3383 else
3384 {
3385 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3386 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3387 }
3388 }
3389 else
3390 /* Either we can find a destination for this edge now or never. */
3391 ici->param_index = -1;
3392 }
3393
3394 return res;
3395 }
3396
3397 /* Recursively traverse subtree of NODE (including node) made of inlined
3398 cgraph_edges when CS has been inlined and invoke
3399 update_indirect_edges_after_inlining on all nodes and
3400 update_jump_functions_after_inlining on all non-inlined edges that lead out
3401 of this subtree. Newly discovered indirect edges will be added to
3402    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3403    created.  */
3404
3405 static bool
3406 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3407 struct cgraph_node *node,
3408 vec<cgraph_edge *> *new_edges)
3409 {
3410 struct cgraph_edge *e;
3411 bool res;
3412
3413 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3414
3415 for (e = node->callees; e; e = e->next_callee)
3416 if (!e->inline_failed)
3417 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3418 else
3419 update_jump_functions_after_inlining (cs, e);
3420 for (e = node->indirect_calls; e; e = e->next_callee)
3421 update_jump_functions_after_inlining (cs, e);
3422
3423 return res;
3424 }
3425
3426 /* Combine two controlled uses counts as done during inlining. */
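
/* For instance, with hypothetical counts: if the new root knows of two
   controlled uses and the inlined callee contributed three, the result is
   2 + 3 - 1 = 4, because the call argument that has just been inlined away
   accounted for one of the counted uses.  */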
3427
3428 static int
3429 combine_controlled_uses_counters (int c, int d)
3430 {
3431 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3432 return IPA_UNDESCRIBED_USE;
3433 else
3434 return c + d - 1;
3435 }
3436
3437 /* Propagate the number of controlled users from CS->callee to the new root
3438    of the tree of inlined nodes.  */
3439
3440 static void
3441 propagate_controlled_uses (struct cgraph_edge *cs)
3442 {
3443 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3444 struct cgraph_node *new_root = cs->caller->global.inlined_to
3445 ? cs->caller->global.inlined_to : cs->caller;
3446 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3447 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3448 int count, i;
3449
3450 count = MIN (ipa_get_cs_argument_count (args),
3451 ipa_get_param_count (old_root_info));
3452 for (i = 0; i < count; i++)
3453 {
3454 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3455 struct ipa_cst_ref_desc *rdesc;
3456
3457 if (jf->type == IPA_JF_PASS_THROUGH)
3458 {
3459 int src_idx, c, d;
3460 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3461 c = ipa_get_controlled_uses (new_root_info, src_idx);
3462 d = ipa_get_controlled_uses (old_root_info, i);
3463
3464 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3465 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3466 c = combine_controlled_uses_counters (c, d);
3467 ipa_set_controlled_uses (new_root_info, src_idx, c);
3468 if (c == 0 && new_root_info->ipcp_orig_node)
3469 {
3470 struct cgraph_node *n;
3471 struct ipa_ref *ref;
3472 tree t = new_root_info->known_vals[src_idx];
3473
3474 if (t && TREE_CODE (t) == ADDR_EXPR
3475 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3476 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3477 && (ref = new_root->find_reference (n, NULL, 0)))
3478 {
3479 if (dump_file)
3480 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3481 "reference from %s/%i to %s/%i.\n",
3482 xstrdup (new_root->name ()),
3483 new_root->order,
3484 xstrdup (n->name ()), n->order);
3485 ref->remove_reference ();
3486 }
3487 }
3488 }
3489 else if (jf->type == IPA_JF_CONST
3490 && (rdesc = jfunc_rdesc_usable (jf)))
3491 {
3492 int d = ipa_get_controlled_uses (old_root_info, i);
3493 int c = rdesc->refcount;
3494 rdesc->refcount = combine_controlled_uses_counters (c, d);
3495 if (rdesc->refcount == 0)
3496 {
3497 tree cst = ipa_get_jf_constant (jf);
3498 struct cgraph_node *n;
3499 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3500 && TREE_CODE (TREE_OPERAND (cst, 0))
3501 == FUNCTION_DECL);
3502 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3503 if (n)
3504 {
3505 struct cgraph_node *clone;
3506 bool ok;
3507 ok = remove_described_reference (n, rdesc);
3508 gcc_checking_assert (ok);
3509
3510 clone = cs->caller;
3511 while (clone->global.inlined_to
3512 && clone != rdesc->cs->caller
3513 && IPA_NODE_REF (clone)->ipcp_orig_node)
3514 {
3515 struct ipa_ref *ref;
3516 ref = clone->find_reference (n, NULL, 0);
3517 if (ref)
3518 {
3519 if (dump_file)
3520 fprintf (dump_file, "ipa-prop: Removing "
3521 "cloning-created reference "
3522 "from %s/%i to %s/%i.\n",
3523 xstrdup (clone->name ()),
3524 clone->order,
3525 xstrdup (n->name ()),
3526 n->order);
3527 ref->remove_reference ();
3528 }
3529 clone = clone->callers->caller;
3530 }
3531 }
3532 }
3533 }
3534 }
3535
3536 for (i = ipa_get_param_count (old_root_info);
3537 i < ipa_get_cs_argument_count (args);
3538 i++)
3539 {
3540 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3541
3542 if (jf->type == IPA_JF_CONST)
3543 {
3544 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3545 if (rdesc)
3546 rdesc->refcount = IPA_UNDESCRIBED_USE;
3547 }
3548 else if (jf->type == IPA_JF_PASS_THROUGH)
3549 ipa_set_controlled_uses (new_root_info,
3550 jf->value.pass_through.formal_id,
3551 IPA_UNDESCRIBED_USE);
3552 }
3553 }
3554
3555 /* Update jump functions and call note functions on inlining the call site CS.
3556 CS is expected to lead to a node already cloned by
3557 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3558 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3559 created. */
3560
3561 bool
3562 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3563 vec<cgraph_edge *> *new_edges)
3564 {
3565 bool changed;
3566 /* Do nothing if the preparation phase has not been carried out yet
3567 (i.e. during early inlining). */
3568 if (!ipa_node_params_vector.exists ())
3569 return false;
3570 gcc_assert (ipa_edge_args_vector);
3571
3572 propagate_controlled_uses (cs);
3573 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3574
3575 return changed;
3576 }
3577
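/* Usage sketch (hedged; the canonical caller is the inliner): after an
   edge E has been inlined, newly discovered indirect edges can be
   collected roughly like so:

     vec<cgraph_edge *> new_edges = vNULL;
     if (ipa_propagate_indirect_call_infos (e, &new_edges))
       {
         ...consider the edges in NEW_EDGES for further inlining...
       }
     new_edges.release ();
*/
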
3578 /* Frees all dynamically allocated structures that the argument info points
3579 to. */
3580
3581 void
3582 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3583 {
3584 vec_free (args->jump_functions);
3585 memset (args, 0, sizeof (*args));
3586 }
3587
3588 /* Free all ipa_edge structures. */
3589
3590 void
3591 ipa_free_all_edge_args (void)
3592 {
3593 int i;
3594 struct ipa_edge_args *args;
3595
3596 if (!ipa_edge_args_vector)
3597 return;
3598
3599 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3600 ipa_free_edge_args_substructures (args);
3601
3602 vec_free (ipa_edge_args_vector);
3603 }
3604
3605 /* Frees all dynamically allocated structures that the param info points
3606 to. */
3607
3608 void
3609 ipa_free_node_params_substructures (struct ipa_node_params *info)
3610 {
3611 info->descriptors.release ();
3612 free (info->lattices);
3613 /* Lattice values and their sources are deallocated with their allocation
3614 pool. */
3615 info->known_vals.release ();
3616 memset (info, 0, sizeof (*info));
3617 }
3618
3619 /* Free all ipa_node_params structures. */
3620
3621 void
3622 ipa_free_all_node_params (void)
3623 {
3624 int i;
3625 struct ipa_node_params *info;
3626
3627 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3628 ipa_free_node_params_substructures (info);
3629
3630 ipa_node_params_vector.release ();
3631 }
3632
3633 /* Set the aggregate replacements of NODE to be AGGVALS. */
3634
3635 void
3636 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3637 struct ipa_agg_replacement_value *aggvals)
3638 {
3639 if (vec_safe_length (ipa_node_agg_replacements)
3640 <= (unsigned) symtab->cgraph_max_uid)
3641 vec_safe_grow_cleared (ipa_node_agg_replacements,
3642 symtab->cgraph_max_uid + 1);
3643
3644 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3645 }
3646
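/* Illustrative sketch with hypothetical values: a pass that has proven
   that the field at byte offset 8 of NODE's first parameter is always
   the integer 42 could record that fact as

     struct ipa_agg_replacement_value *av
       = ggc_alloc<ipa_agg_replacement_value> ();
     av->offset = 8 * BITS_PER_UNIT;
     av->index = 0;
     av->value = build_int_cst (integer_type_node, 42);
     av->by_ref = false;
     av->next = NULL;
     ipa_set_node_agg_value_chain (node, av);

   Offsets are in bits; ipcp_transform_function below consumes such
   chains.  */
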
3647 /* Hook that is called by cgraph.c when an edge is removed. */
3648
3649 static void
3650 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3651 {
3652 struct ipa_edge_args *args;
3653
3654 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3655 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3656 return;
3657
3658 args = IPA_EDGE_REF (cs);
3659 if (args->jump_functions)
3660 {
3661 struct ipa_jump_func *jf;
3662 int i;
3663 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3664 {
3665 struct ipa_cst_ref_desc *rdesc;
3666 try_decrement_rdesc_refcount (jf);
3667 if (jf->type == IPA_JF_CONST
3668 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3669 && rdesc->cs == cs)
3670 rdesc->cs = NULL;
3671 }
3672 }
3673
3674 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3675 }
3676
3677 /* Hook that is called by cgraph.c when a node is removed. */
3678
3679 static void
3680 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3681 {
3682 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3683 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3684 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3685 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3686 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3687 }
3688
3689 /* Hook that is called by cgraph.c when an edge is duplicated. */
3690
3691 static void
3692 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3693 void *data ATTRIBUTE_UNUSED)
3694 {
3695 struct ipa_edge_args *old_args, *new_args;
3696 unsigned int i;
3697
3698 ipa_check_create_edge_args ();
3699
3700 old_args = IPA_EDGE_REF (src);
3701 new_args = IPA_EDGE_REF (dst);
3702
3703 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3704 if (old_args->polymorphic_call_contexts)
3705 new_args->polymorphic_call_contexts
3706 = vec_safe_copy (old_args->polymorphic_call_contexts);
3707
3708 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3709 {
3710 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3711 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3712
3713 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3714
3715 if (src_jf->type == IPA_JF_CONST)
3716 {
3717 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3718
3719 if (!src_rdesc)
3720 dst_jf->value.constant.rdesc = NULL;
3721 else if (src->caller == dst->caller)
3722 {
3723 struct ipa_ref *ref;
3724 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3725 gcc_checking_assert (n);
3726 ref = src->caller->find_reference (n, src->call_stmt,
3727 src->lto_stmt_uid);
3728 gcc_checking_assert (ref);
3729 dst->caller->clone_reference (ref, ref->stmt);
3730
3731 gcc_checking_assert (ipa_refdesc_pool);
3732 struct ipa_cst_ref_desc *dst_rdesc
3733 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3734 dst_rdesc->cs = dst;
3735 dst_rdesc->refcount = src_rdesc->refcount;
3736 dst_rdesc->next_duplicate = NULL;
3737 dst_jf->value.constant.rdesc = dst_rdesc;
3738 }
3739 else if (src_rdesc->cs == src)
3740 {
3741 struct ipa_cst_ref_desc *dst_rdesc;
3742 gcc_checking_assert (ipa_refdesc_pool);
3743 dst_rdesc
3744 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3745 dst_rdesc->cs = dst;
3746 dst_rdesc->refcount = src_rdesc->refcount;
3747 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3748 src_rdesc->next_duplicate = dst_rdesc;
3749 dst_jf->value.constant.rdesc = dst_rdesc;
3750 }
3751 else
3752 {
3753 struct ipa_cst_ref_desc *dst_rdesc;
3754 /* This can happen during inlining, when a JFUNC can refer to a
3755 reference taken in a function up in the tree of inline clones.
3756 We need to find the duplicate that refers to our tree of
3757 inline clones. */
3758
3759 gcc_assert (dst->caller->global.inlined_to);
3760 for (dst_rdesc = src_rdesc->next_duplicate;
3761 dst_rdesc;
3762 dst_rdesc = dst_rdesc->next_duplicate)
3763 {
3764 struct cgraph_node *top;
3765 top = dst_rdesc->cs->caller->global.inlined_to
3766 ? dst_rdesc->cs->caller->global.inlined_to
3767 : dst_rdesc->cs->caller;
3768 if (dst->caller->global.inlined_to == top)
3769 break;
3770 }
3771 gcc_assert (dst_rdesc);
3772 dst_jf->value.constant.rdesc = dst_rdesc;
3773 }
3774 }
3775 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3776 && src->caller == dst->caller)
3777 {
3778 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3779 ? dst->caller->global.inlined_to : dst->caller;
3780 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3781 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3782
3783 int c = ipa_get_controlled_uses (root_info, idx);
3784 if (c != IPA_UNDESCRIBED_USE)
3785 {
3786 c++;
3787 ipa_set_controlled_uses (root_info, idx, c);
3788 }
3789 }
3790 }
3791 }
3792
3793 /* Hook that is called by cgraph.c when a node is duplicated. */
3794
3795 static void
3796 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3797 void *data ATTRIBUTE_UNUSED)
3798 {
3799 struct ipa_node_params *old_info, *new_info;
3800 struct ipa_agg_replacement_value *old_av, *new_av;
3801
3802 ipa_check_create_node_params ();
3803 old_info = IPA_NODE_REF (src);
3804 new_info = IPA_NODE_REF (dst);
3805
3806 new_info->descriptors = old_info->descriptors.copy ();
3807 new_info->lattices = NULL;
3808 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3809
3810 new_info->analysis_done = old_info->analysis_done;
3811 new_info->node_enqueued = old_info->node_enqueued;
3812
3813 old_av = ipa_get_agg_replacements_for_node (src);
3814 if (!old_av)
3815 return;
3816
3817 new_av = NULL;
3818 while (old_av)
3819 {
3820 struct ipa_agg_replacement_value *v;
3821
3822 v = ggc_alloc<ipa_agg_replacement_value> ();
3823 memcpy (v, old_av, sizeof (*v));
3824 v->next = new_av;
3825 new_av = v;
3826 old_av = old_av->next;
3827 }
3828 ipa_set_node_agg_value_chain (dst, new_av);
3829 }
3830
3832 /* Analyze a function newly added to the callgraph. */
3833
3834 static void
3835 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3836 {
3837 if (node->has_gimple_body_p ())
3838 ipa_analyze_node (node);
3839 }
3840
3841 /* Register our cgraph hooks if they are not already there. */
3842
3843 void
3844 ipa_register_cgraph_hooks (void)
3845 {
3846 if (!edge_removal_hook_holder)
3847 edge_removal_hook_holder =
3848 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3849 if (!node_removal_hook_holder)
3850 node_removal_hook_holder =
3851 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3852 if (!edge_duplication_hook_holder)
3853 edge_duplication_hook_holder =
3854 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3855 if (!node_duplication_hook_holder)
3856 node_duplication_hook_holder =
3857 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3858 function_insertion_hook_holder =
3859 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3860 }
3861
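/* Lifecycle sketch (an assumption about typical use, mirroring what
   ipa_prop_read_jump_functions below does): an IPA pass brackets its
   use of this infrastructure roughly as

     ipa_check_create_node_params ();
     ipa_check_create_edge_args ();
     ipa_register_cgraph_hooks ();
     ...analysis and propagation...
     ipa_free_all_structures_after_iinln ();

   so that the hooks keep the per-node and per-edge vectors consistent
   while the call graph is mutated.  */
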
3862 /* Unregister our cgraph hooks. */
3863
3864 static void
3865 ipa_unregister_cgraph_hooks (void)
3866 {
3867 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3868 edge_removal_hook_holder = NULL;
3869 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3870 node_removal_hook_holder = NULL;
3871 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3872 edge_duplication_hook_holder = NULL;
3873 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3874 node_duplication_hook_holder = NULL;
3875 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3876 function_insertion_hook_holder = NULL;
3877 }
3878
3879 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3880 longer needed after ipa-cp. */
3881
3882 void
3883 ipa_free_all_structures_after_ipa_cp (void)
3884 {
3885 if (!optimize)
3886 {
3887 ipa_free_all_edge_args ();
3888 ipa_free_all_node_params ();
3889 free_alloc_pool (ipcp_sources_pool);
3890 free_alloc_pool (ipcp_values_pool);
3891 free_alloc_pool (ipcp_agg_lattice_pool);
3892 ipa_unregister_cgraph_hooks ();
3893 if (ipa_refdesc_pool)
3894 free_alloc_pool (ipa_refdesc_pool);
3895 }
3896 }
3897
3898 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3899 longer needed after indirect inlining. */
3900
3901 void
3902 ipa_free_all_structures_after_iinln (void)
3903 {
3904 ipa_free_all_edge_args ();
3905 ipa_free_all_node_params ();
3906 ipa_unregister_cgraph_hooks ();
3907 if (ipcp_sources_pool)
3908 free_alloc_pool (ipcp_sources_pool);
3909 if (ipcp_values_pool)
3910 free_alloc_pool (ipcp_values_pool);
3911 if (ipcp_agg_lattice_pool)
3912 free_alloc_pool (ipcp_agg_lattice_pool);
3913 if (ipa_refdesc_pool)
3914 free_alloc_pool (ipa_refdesc_pool);
3915 }
3916
3917 /* Print ipa_tree_map data structures of function NODE to F. */
3919
3920 void
3921 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3922 {
3923 int i, count;
3924 struct ipa_node_params *info;
3925
3926 if (!node->definition)
3927 return;
3928 info = IPA_NODE_REF (node);
3929 fprintf (f, " function %s/%i parameter descriptors:\n",
3930 node->name (), node->order);
3931 count = ipa_get_param_count (info);
3932 for (i = 0; i < count; i++)
3933 {
3934 int c;
3935
3936 fprintf (f, " ");
3937 ipa_dump_param (f, info, i);
3938 if (ipa_is_param_used (info, i))
3939 fprintf (f, " used");
3940 c = ipa_get_controlled_uses (info, i);
3941 if (c == IPA_UNDESCRIBED_USE)
3942 fprintf (f, " undescribed_use");
3943 else
3944 fprintf (f, " controlled_uses=%i", c);
3945 fprintf (f, "\n");
3946 }
3947 }
3948
3949 /* Print ipa_tree_map data structures of all functions in the
3950 callgraph to F. */
3951
3952 void
3953 ipa_print_all_params (FILE * f)
3954 {
3955 struct cgraph_node *node;
3956
3957 fprintf (f, "\nFunction parameters:\n");
3958 FOR_EACH_FUNCTION (node)
3959 ipa_print_node_params (f, node);
3960 }
3961
3962 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3963
3964 vec<tree>
3965 ipa_get_vector_of_formal_parms (tree fndecl)
3966 {
3967 vec<tree> args;
3968 int count;
3969 tree parm;
3970
3971 gcc_assert (!flag_wpa);
3972 count = count_formal_params (fndecl);
3973 args.create (count);
3974 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3975 args.quick_push (parm);
3976
3977 return args;
3978 }
3979
3980 /* Return a heap-allocated vector containing types of formal parameters of
3981 function type FNTYPE. */
3982
3983 vec<tree>
3984 ipa_get_vector_of_formal_parm_types (tree fntype)
3985 {
3986 vec<tree> types;
3987 int count = 0;
3988 tree t;
3989
3990 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3991 count++;
3992
3993 types.create (count);
3994 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3995 types.quick_push (TREE_VALUE (t));
3996
3997 return types;
3998 }
3999
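/* Example: for a prototyped declaration

     int f (int a, double b);

   ipa_get_vector_of_formal_parms returns {a, b} whereas
   ipa_get_vector_of_formal_parm_types returns {int, double, void},
   the trailing void coming from the terminating void_list_node of
   TYPE_ARG_TYPES.  ipa_modify_formal_parameters below accounts for
   this extra element with its last_parm_void check.  */
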
4000 /* Modify the function declaration FNDECL and its type according to the plan in
4001 ADJUSTMENTS. It also sets base fields of individual adjustment structures
4002 to reflect the actual parameters being modified which are determined by the
4003 base_index field. */
4004
4005 void
4006 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4007 {
4008 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4009 tree orig_type = TREE_TYPE (fndecl);
4010 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4011
4012 /* The following test is an ugly hack; some functions simply don't have any
4013 arguments in their type. This is probably a bug but well... */
4014 bool care_for_types = (old_arg_types != NULL_TREE);
4015 bool last_parm_void;
4016 vec<tree> otypes;
4017 if (care_for_types)
4018 {
4019 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4020 == void_type_node);
4021 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4022 if (last_parm_void)
4023 gcc_assert (oparms.length () + 1 == otypes.length ());
4024 else
4025 gcc_assert (oparms.length () == otypes.length ());
4026 }
4027 else
4028 {
4029 last_parm_void = false;
4030 otypes.create (0);
4031 }
4032
4033 int len = adjustments.length ();
4034 tree *link = &DECL_ARGUMENTS (fndecl);
4035 tree new_arg_types = NULL;
4036 for (int i = 0; i < len; i++)
4037 {
4038 struct ipa_parm_adjustment *adj;
4039 gcc_assert (link);
4040
4041 adj = &adjustments[i];
4042 tree parm;
4043 if (adj->op == IPA_PARM_OP_NEW)
4044 parm = NULL;
4045 else
4046 parm = oparms[adj->base_index];
4047 adj->base = parm;
4048
4049 if (adj->op == IPA_PARM_OP_COPY)
4050 {
4051 if (care_for_types)
4052 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4053 new_arg_types);
4054 *link = parm;
4055 link = &DECL_CHAIN (parm);
4056 }
4057 else if (adj->op != IPA_PARM_OP_REMOVE)
4058 {
4059 tree new_parm;
4060 tree ptype;
4061
4062 if (adj->by_ref)
4063 ptype = build_pointer_type (adj->type);
4064 else
4065 {
4066 ptype = adj->type;
4067 if (is_gimple_reg_type (ptype))
4068 {
4069 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4070 if (TYPE_ALIGN (ptype) < malign)
4071 ptype = build_aligned_type (ptype, malign);
4072 }
4073 }
4074
4075 if (care_for_types)
4076 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4077
4078 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4079 ptype);
4080 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4081 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4082 DECL_ARTIFICIAL (new_parm) = 1;
4083 DECL_ARG_TYPE (new_parm) = ptype;
4084 DECL_CONTEXT (new_parm) = fndecl;
4085 TREE_USED (new_parm) = 1;
4086 DECL_IGNORED_P (new_parm) = 1;
4087 layout_decl (new_parm, 0);
4088
4089 if (adj->op == IPA_PARM_OP_NEW)
4090 adj->base = NULL;
4091 else
4092 adj->base = parm;
4093 adj->new_decl = new_parm;
4094
4095 *link = new_parm;
4096 link = &DECL_CHAIN (new_parm);
4097 }
4098 }
4099
4100 *link = NULL_TREE;
4101
4102 tree new_reversed = NULL;
4103 if (care_for_types)
4104 {
4105 new_reversed = nreverse (new_arg_types);
4106 if (last_parm_void)
4107 {
4108 if (new_reversed)
4109 TREE_CHAIN (new_arg_types) = void_list_node;
4110 else
4111 new_reversed = void_list_node;
4112 }
4113 }
4114
4115 /* Use copy_node to preserve as much as possible from the original type
4116 (debug info, attribute lists etc.). The exception is METHOD_TYPEs,
4117 which must have a THIS argument; when we are asked to remove it, we
4118 need to build a new FUNCTION_TYPE instead. */
4120 tree new_type = NULL;
4121 if (TREE_CODE (orig_type) != METHOD_TYPE
4122 || (adjustments[0].op == IPA_PARM_OP_COPY
4123 && adjustments[0].base_index == 0))
4124 {
4125 new_type = build_distinct_type_copy (orig_type);
4126 TYPE_ARG_TYPES (new_type) = new_reversed;
4127 }
4128 else
4129 {
4130 new_type
4131 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4132 new_reversed));
4133 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4134 DECL_VINDEX (fndecl) = NULL_TREE;
4135 }
4136
4137 /* When signature changes, we need to clear builtin info. */
4138 if (DECL_BUILT_IN (fndecl))
4139 {
4140 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4141 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4142 }
4143
4144 TREE_TYPE (fndecl) = new_type;
4145 DECL_VIRTUAL_P (fndecl) = 0;
4146 DECL_LANG_SPECIFIC (fndecl) = NULL;
4147 otypes.release ();
4148 oparms.release ();
4149 }
4150
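/* Sketch of driving this function (hypothetical adjustment values): to
   keep the first parameter of int f (int a, double b) and drop the
   second, a caller could build something like

     ipa_parm_adjustment_vec adjustments = vNULL;
     struct ipa_parm_adjustment keep, drop;
     memset (&keep, 0, sizeof (keep));
     keep.base_index = 0;
     keep.op = IPA_PARM_OP_COPY;
     memset (&drop, 0, sizeof (drop));
     drop.base_index = 1;
     drop.op = IPA_PARM_OP_REMOVE;
     adjustments.safe_push (keep);
     adjustments.safe_push (drop);
     ipa_modify_formal_parameters (f_decl, adjustments);

   and then rewrite every call site with ipa_modify_call_arguments
   below using the same vector.  */
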
4151 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4152 If this is a directly recursive call, CS must be NULL. Otherwise it must
4153 contain the corresponding call graph edge. */
4154
4155 void
4156 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4157 ipa_parm_adjustment_vec adjustments)
4158 {
4159 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4160 vec<tree> vargs;
4161 vec<tree, va_gc> **debug_args = NULL;
4162 gimple new_stmt;
4163 gimple_stmt_iterator gsi, prev_gsi;
4164 tree callee_decl;
4165 int i, len;
4166
4167 len = adjustments.length ();
4168 vargs.create (len);
4169 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4170 current_node->remove_stmt_references (stmt);
4171
4172 gsi = gsi_for_stmt (stmt);
4173 prev_gsi = gsi;
4174 gsi_prev (&prev_gsi);
4175 for (i = 0; i < len; i++)
4176 {
4177 struct ipa_parm_adjustment *adj;
4178
4179 adj = &adjustments[i];
4180
4181 if (adj->op == IPA_PARM_OP_COPY)
4182 {
4183 tree arg = gimple_call_arg (stmt, adj->base_index);
4184
4185 vargs.quick_push (arg);
4186 }
4187 else if (adj->op != IPA_PARM_OP_REMOVE)
4188 {
4189 tree expr, base, off;
4190 location_t loc;
4191 unsigned int deref_align = 0;
4192 bool deref_base = false;
4193
4194 /* We create a new parameter out of the value of the old one; we can
4195 do the following kinds of transformations:
4196
4197 - A scalar passed by reference is converted to a scalar passed by
4198 value. (adj->by_ref is false and the type of the original
4199 actual argument is a pointer to a scalar).
4200
4201 - A part of an aggregate is passed instead of the whole aggregate.
4202 The part can be passed either by value or by reference, this is
4203 determined by value of adj->by_ref. Moreover, the code below
4204 handles both situations when the original aggregate is passed by
4205 value (its type is not a pointer) and when it is passed by
4206 reference (it is a pointer to an aggregate).
4207
4208 When the new argument is passed by reference (adj->by_ref is true)
4209 it must be a part of an aggregate and therefore we form it by
4210 simply taking the address of a reference inside the original
4211 aggregate. */
4212
4213 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4214 base = gimple_call_arg (stmt, adj->base_index);
4215 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4216 : EXPR_LOCATION (base);
4217
4218 if (TREE_CODE (base) != ADDR_EXPR
4219 && POINTER_TYPE_P (TREE_TYPE (base)))
4220 off = build_int_cst (adj->alias_ptr_type,
4221 adj->offset / BITS_PER_UNIT);
4222 else
4223 {
4224 HOST_WIDE_INT base_offset;
4225 tree prev_base;
4226 bool addrof;
4227
4228 if (TREE_CODE (base) == ADDR_EXPR)
4229 {
4230 base = TREE_OPERAND (base, 0);
4231 addrof = true;
4232 }
4233 else
4234 addrof = false;
4235 prev_base = base;
4236 base = get_addr_base_and_unit_offset (base, &base_offset);
4237 /* Aggregate arguments can have non-invariant addresses. */
4238 if (!base)
4239 {
4240 base = build_fold_addr_expr (prev_base);
4241 off = build_int_cst (adj->alias_ptr_type,
4242 adj->offset / BITS_PER_UNIT);
4243 }
4244 else if (TREE_CODE (base) == MEM_REF)
4245 {
4246 if (!addrof)
4247 {
4248 deref_base = true;
4249 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4250 }
4251 off = build_int_cst (adj->alias_ptr_type,
4252 base_offset
4253 + adj->offset / BITS_PER_UNIT);
4254 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4255 off);
4256 base = TREE_OPERAND (base, 0);
4257 }
4258 else
4259 {
4260 off = build_int_cst (adj->alias_ptr_type,
4261 base_offset
4262 + adj->offset / BITS_PER_UNIT);
4263 base = build_fold_addr_expr (base);
4264 }
4265 }
4266
4267 if (!adj->by_ref)
4268 {
4269 tree type = adj->type;
4270 unsigned int align;
4271 unsigned HOST_WIDE_INT misalign;
4272
4273 if (deref_base)
4274 {
4275 align = deref_align;
4276 misalign = 0;
4277 }
4278 else
4279 {
4280 get_pointer_alignment_1 (base, &align, &misalign);
4281 if (TYPE_ALIGN (type) > align)
4282 align = TYPE_ALIGN (type);
4283 }
4284 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4285 * BITS_PER_UNIT);
4286 misalign = misalign & (align - 1);
4287 if (misalign != 0)
4288 align = (misalign & -misalign);
4289 if (align < TYPE_ALIGN (type))
4290 type = build_aligned_type (type, align);
4291 base = force_gimple_operand_gsi (&gsi, base,
4292 true, NULL, true, GSI_SAME_STMT);
4293 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4294 /* If expr is not a valid gimple call argument, emit
4295 a load into a temporary. */
4296 if (is_gimple_reg_type (TREE_TYPE (expr)))
4297 {
4298 gimple tem = gimple_build_assign (NULL_TREE, expr);
4299 if (gimple_in_ssa_p (cfun))
4300 {
4301 gimple_set_vuse (tem, gimple_vuse (stmt));
4302 expr = make_ssa_name (TREE_TYPE (expr), tem);
4303 }
4304 else
4305 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4306 gimple_assign_set_lhs (tem, expr);
4307 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4308 }
4309 }
4310 else
4311 {
4312 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4313 expr = build_fold_addr_expr (expr);
4314 expr = force_gimple_operand_gsi (&gsi, expr,
4315 true, NULL, true, GSI_SAME_STMT);
4316 }
4317 vargs.quick_push (expr);
4318 }
4319 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4320 {
4321 unsigned int ix;
4322 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4323 gimple def_temp;
4324
4325 arg = gimple_call_arg (stmt, adj->base_index);
4326 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4327 {
4328 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4329 continue;
4330 arg = fold_convert_loc (gimple_location (stmt),
4331 TREE_TYPE (origin), arg);
4332 }
4333 if (debug_args == NULL)
4334 debug_args = decl_debug_args_insert (callee_decl);
4335 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4336 if (ddecl == origin)
4337 {
4338 ddecl = (**debug_args)[ix + 1];
4339 break;
4340 }
4341 if (ddecl == NULL)
4342 {
4343 ddecl = make_node (DEBUG_EXPR_DECL);
4344 DECL_ARTIFICIAL (ddecl) = 1;
4345 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4346 DECL_MODE (ddecl) = DECL_MODE (origin);
4347
4348 vec_safe_push (*debug_args, origin);
4349 vec_safe_push (*debug_args, ddecl);
4350 }
4351 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4352 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4353 }
4354 }
4355
4356 if (dump_file && (dump_flags & TDF_DETAILS))
4357 {
4358 fprintf (dump_file, "replacing stmt:");
4359 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4360 }
4361
4362 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4363 vargs.release ();
4364 if (gimple_call_lhs (stmt))
4365 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4366
4367 gimple_set_block (new_stmt, gimple_block (stmt));
4368 if (gimple_has_location (stmt))
4369 gimple_set_location (new_stmt, gimple_location (stmt));
4370 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4371 gimple_call_copy_flags (new_stmt, stmt);
4372 if (gimple_in_ssa_p (cfun))
4373 {
4374 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4375 if (gimple_vdef (stmt))
4376 {
4377 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4378 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4379 }
4380 }
4381
4382 if (dump_file && (dump_flags & TDF_DETAILS))
4383 {
4384 fprintf (dump_file, "with stmt:");
4385 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4386 fprintf (dump_file, "\n");
4387 }
4388 gsi_replace (&gsi, new_stmt, true);
4389 if (cs)
4390 cs->set_call_stmt (new_stmt);
4391 do
4392 {
4393 current_node->record_stmt_references (gsi_stmt (gsi));
4394 gsi_prev (&gsi);
4395 }
4396 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4397 }
4398
4399 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4400 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4401 specifies whether the function should care about type incompatibility
4402 between the current and new expressions. If it is false, the function
4403 will leave incompatibility issues to the caller. Return true iff the
4404 expression was modified. */
4405
4406 bool
4407 ipa_modify_expr (tree *expr, bool convert,
4408 ipa_parm_adjustment_vec adjustments)
4409 {
4410 struct ipa_parm_adjustment *cand
4411 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4412 if (!cand)
4413 return false;
4414
4415 tree src;
4416 if (cand->by_ref)
4417 src = build_simple_mem_ref (cand->new_decl);
4418 else
4419 src = cand->new_decl;
4420
4421 if (dump_file && (dump_flags & TDF_DETAILS))
4422 {
4423 fprintf (dump_file, "About to replace expr ");
4424 print_generic_expr (dump_file, *expr, 0);
4425 fprintf (dump_file, " with ");
4426 print_generic_expr (dump_file, src, 0);
4427 fprintf (dump_file, "\n");
4428 }
4429
4430 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4431 {
4432 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4433 *expr = vce;
4434 }
4435 else
4436 *expr = src;
4437 return true;
4438 }
4439
4440 /* If T is an SSA_NAME, return NULL if it is not a default def or
4441 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4442 the base variable is always returned, regardless of whether it is a
4443 default def. Return T if it is not an SSA_NAME. */
4444
4445 static tree
4446 get_ssa_base_param (tree t, bool ignore_default_def)
4447 {
4448 if (TREE_CODE (t) == SSA_NAME)
4449 {
4450 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4451 return SSA_NAME_VAR (t);
4452 else
4453 return NULL_TREE;
4454 }
4455 return t;
4456 }
4457
4458 /* Given an expression, return an adjustment entry specifying the
4459 transformation to be done on EXPR. If no suitable adjustment entry
4460 was found, returns NULL.
4461
4462 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4463 default def, otherwise bail on them.
4464
4465 If CONVERT is non-NULL, this function will set *CONVERT if the
4466 expression provided is a component reference. ADJUSTMENTS is the
4467 adjustments vector. */
4468
4469 ipa_parm_adjustment *
4470 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4471 ipa_parm_adjustment_vec adjustments,
4472 bool ignore_default_def)
4473 {
4474 if (TREE_CODE (**expr) == BIT_FIELD_REF
4475 || TREE_CODE (**expr) == IMAGPART_EXPR
4476 || TREE_CODE (**expr) == REALPART_EXPR)
4477 {
4478 *expr = &TREE_OPERAND (**expr, 0);
4479 if (convert)
4480 *convert = true;
4481 }
4482
4483 HOST_WIDE_INT offset, size, max_size;
4484 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4485 if (!base || size == -1 || max_size == -1)
4486 return NULL;
4487
4488 if (TREE_CODE (base) == MEM_REF)
4489 {
4490 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4491 base = TREE_OPERAND (base, 0);
4492 }
4493
4494 base = get_ssa_base_param (base, ignore_default_def);
4495 if (!base || TREE_CODE (base) != PARM_DECL)
4496 return NULL;
4497
4498 struct ipa_parm_adjustment *cand = NULL;
4499 unsigned int len = adjustments.length ();
4500 for (unsigned i = 0; i < len; i++)
4501 {
4502 struct ipa_parm_adjustment *adj = &adjustments[i];
4503
4504 if (adj->base == base
4505 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4506 {
4507 cand = adj;
4508 break;
4509 }
4510 }
4511
4512 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4513 return NULL;
4514 return cand;
4515 }
4516
4517 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4518
4519 static bool
4520 index_in_adjustments_multiple_times_p (int base_index,
4521 ipa_parm_adjustment_vec adjustments)
4522 {
4523 int i, len = adjustments.length ();
4524 bool one = false;
4525
4526 for (i = 0; i < len; i++)
4527 {
4528 struct ipa_parm_adjustment *adj;
4529 adj = &adjustments[i];
4530
4531 if (adj->base_index == base_index)
4532 {
4533 if (one)
4534 return true;
4535 else
4536 one = true;
4537 }
4538 }
4539 return false;
4540 }
4541
4543 /* Return adjustments that should have the same effect on function parameters
4544 and call arguments as if they were first changed according to adjustments in
4545 INNER and then by adjustments in OUTER. */
4546
4547 ipa_parm_adjustment_vec
4548 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4549 ipa_parm_adjustment_vec outer)
4550 {
4551 int i, outlen = outer.length ();
4552 int inlen = inner.length ();
4553 int removals = 0;
4554 ipa_parm_adjustment_vec adjustments, tmp;
4555
4556 tmp.create (inlen);
4557 for (i = 0; i < inlen; i++)
4558 {
4559 struct ipa_parm_adjustment *n;
4560 n = &inner[i];
4561
4562 if (n->op == IPA_PARM_OP_REMOVE)
4563 removals++;
4564 else
4565 {
4566 /* FIXME: Handling of new arguments is not implemented yet. */
4567 gcc_assert (n->op != IPA_PARM_OP_NEW);
4568 tmp.quick_push (*n);
4569 }
4570 }
4571
4572 adjustments.create (outlen + removals);
4573 for (i = 0; i < outlen; i++)
4574 {
4575 struct ipa_parm_adjustment r;
4576 struct ipa_parm_adjustment *out = &outer[i];
4577 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4578
4579 memset (&r, 0, sizeof (r));
4580 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4581 if (out->op == IPA_PARM_OP_REMOVE)
4582 {
4583 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4584 {
4585 r.op = IPA_PARM_OP_REMOVE;
4586 adjustments.quick_push (r);
4587 }
4588 continue;
4589 }
4590 else
4591 {
4592 /* FIXME: Handling of new arguments is not implemented yet. */
4593 gcc_assert (out->op != IPA_PARM_OP_NEW);
4594 }
4595
4596 r.base_index = in->base_index;
4597 r.type = out->type;
4598
4599 /* FIXME: Create nonlocal value too. */
4600
4601 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4602 r.op = IPA_PARM_OP_COPY;
4603 else if (in->op == IPA_PARM_OP_COPY)
4604 r.offset = out->offset;
4605 else if (out->op == IPA_PARM_OP_COPY)
4606 r.offset = in->offset;
4607 else
4608 r.offset = in->offset + out->offset;
4609 adjustments.quick_push (r);
4610 }
4611
4612 for (i = 0; i < inlen; i++)
4613 {
4614 struct ipa_parm_adjustment *n = &inner[i];
4615
4616 if (n->op == IPA_PARM_OP_REMOVE)
4617 adjustments.quick_push (*n);
4618 }
4619
4620 tmp.release ();
4621 return adjustments;
4622 }
4623
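/* Worked example: with INNER = { copy 0, copy 1 } (an identity) and
   OUTER = { remove 0, copy 1 }, the loops above produce
   { remove 0, copy 1 }, i.e. the composition removes the first
   original parameter and keeps the second, exactly the effect of
   applying INNER first and OUTER second.  */
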
4624 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4625 friendly way, assuming they are meant to be applied to FNDECL. */
4626
4627 void
4628 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4629 tree fndecl)
4630 {
4631 int i, len = adjustments.length ();
4632 bool first = true;
4633 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4634
4635 fprintf (file, "IPA param adjustments: ");
4636 for (i = 0; i < len; i++)
4637 {
4638 struct ipa_parm_adjustment *adj;
4639 adj = &adjustments[i];
4640
4641 if (!first)
4642 fprintf (file, " ");
4643 else
4644 first = false;
4645
4646 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4647 print_generic_expr (file, parms[adj->base_index], 0);
4648 if (adj->base)
4649 {
4650 fprintf (file, ", base: ");
4651 print_generic_expr (file, adj->base, 0);
4652 }
4653 if (adj->new_decl)
4654 {
4655 fprintf (file, ", new_decl: ");
4656 print_generic_expr (file, adj->new_decl, 0);
4657 }
4658 if (adj->new_ssa_base)
4659 {
4660 fprintf (file, ", new_ssa_base: ");
4661 print_generic_expr (file, adj->new_ssa_base, 0);
4662 }
4663
4664 if (adj->op == IPA_PARM_OP_COPY)
4665 fprintf (file, ", copy_param");
4666 else if (adj->op == IPA_PARM_OP_REMOVE)
4667 fprintf (file, ", remove_param");
4668 else
4669 fprintf (file, ", offset %li", (long) adj->offset);
4670 if (adj->by_ref)
4671 fprintf (file, ", by_ref");
4672 print_node_brief (file, ", type: ", adj->type, 0);
4673 fprintf (file, "\n");
4674 }
4675 parms.release ();
4676 }
4677
4678 /* Dump the AV linked list. */
4679
4680 void
4681 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4682 {
4683 bool comma = false;
4684 fprintf (f, " Aggregate replacements:");
4685 for (; av; av = av->next)
4686 {
4687 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4688 av->index, av->offset);
4689 print_generic_expr (f, av->value, 0);
4690 comma = true;
4691 }
4692 fprintf (f, "\n");
4693 }
4694
4695 /* Stream out jump function JUMP_FUNC to OB. */
4696
4697 static void
4698 ipa_write_jump_function (struct output_block *ob,
4699 struct ipa_jump_func *jump_func)
4700 {
4701 struct ipa_agg_jf_item *item;
4702 struct bitpack_d bp;
4703 int i, count;
4704
4705 streamer_write_uhwi (ob, jump_func->type);
4706 switch (jump_func->type)
4707 {
4708 case IPA_JF_UNKNOWN:
4709 break;
4710 case IPA_JF_KNOWN_TYPE:
4711 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4712 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4713 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4714 break;
4715 case IPA_JF_CONST:
4716 gcc_assert (
4717 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4718 stream_write_tree (ob, jump_func->value.constant.value, true);
4719 break;
4720 case IPA_JF_PASS_THROUGH:
4721 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4722 if (jump_func->value.pass_through.operation == NOP_EXPR)
4723 {
4724 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4725 bp = bitpack_create (ob->main_stream);
4726 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4727 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4728 streamer_write_bitpack (&bp);
4729 }
4730 else
4731 {
4732 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4733 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4734 }
4735 break;
4736 case IPA_JF_ANCESTOR:
4737 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4738 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4739 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4740 bp = bitpack_create (ob->main_stream);
4741 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4742 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4743 streamer_write_bitpack (&bp);
4744 break;
4745 }
4746
4747 count = vec_safe_length (jump_func->agg.items);
4748 streamer_write_uhwi (ob, count);
4749 if (count)
4750 {
4751 bp = bitpack_create (ob->main_stream);
4752 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4753 streamer_write_bitpack (&bp);
4754 }
4755
4756 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4757 {
4758 streamer_write_uhwi (ob, item->offset);
4759 stream_write_tree (ob, item->value, true);
4760 }
4761 }
4762
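/* Wire-format summary of the stream produced above and consumed by
   ipa_read_jump_function below:

     uhwi     jump function type
     ...      type-specific payload (trees, formal ids, bitpacks)
     uhwi     number of aggregate items
     bitpack  agg.by_ref          (present only when the count is nonzero)
     per item: uhwi offset, tree value.  */
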
4763 /* Read in jump function JUMP_FUNC from IB. */
4764
4765 static void
4766 ipa_read_jump_function (struct lto_input_block *ib,
4767 struct ipa_jump_func *jump_func,
4768 struct cgraph_edge *cs,
4769 struct data_in *data_in)
4770 {
4771 enum jump_func_type jftype;
4772 enum tree_code operation;
4773 int i, count;
4774
4775 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4776 switch (jftype)
4777 {
4778 case IPA_JF_UNKNOWN:
4779 jump_func->type = IPA_JF_UNKNOWN;
4780 break;
4781 case IPA_JF_KNOWN_TYPE:
4782 {
4783 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4784 tree base_type = stream_read_tree (ib, data_in);
4785 tree component_type = stream_read_tree (ib, data_in);
4786
4787 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4788 break;
4789 }
4790 case IPA_JF_CONST:
4791 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4792 break;
4793 case IPA_JF_PASS_THROUGH:
4794 operation = (enum tree_code) streamer_read_uhwi (ib);
4795 if (operation == NOP_EXPR)
4796 {
4797 int formal_id = streamer_read_uhwi (ib);
4798 struct bitpack_d bp = streamer_read_bitpack (ib);
4799 bool agg_preserved = bp_unpack_value (&bp, 1);
4800 bool type_preserved = bp_unpack_value (&bp, 1);
4801 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4802 type_preserved);
4803 }
4804 else
4805 {
4806 tree operand = stream_read_tree (ib, data_in);
4807 int formal_id = streamer_read_uhwi (ib);
4808 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4809 operation);
4810 }
4811 break;
4812 case IPA_JF_ANCESTOR:
4813 {
4814 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4815 tree type = stream_read_tree (ib, data_in);
4816 int formal_id = streamer_read_uhwi (ib);
4817 struct bitpack_d bp = streamer_read_bitpack (ib);
4818 bool agg_preserved = bp_unpack_value (&bp, 1);
4819 bool type_preserved = bp_unpack_value (&bp, 1);
4820
4821 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4822 type_preserved);
4823 break;
4824 }
4825 }
4826
4827 count = streamer_read_uhwi (ib);
4828 vec_alloc (jump_func->agg.items, count);
4829 if (count)
4830 {
4831 struct bitpack_d bp = streamer_read_bitpack (ib);
4832 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4833 }
4834 for (i = 0; i < count; i++)
4835 {
4836 struct ipa_agg_jf_item item;
4837 item.offset = streamer_read_uhwi (ib);
4838 item.value = stream_read_tree (ib, data_in);
4839 jump_func->agg.items->quick_push (item);
4840 }
4841 }
4842
4843 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4844 relevant to indirect inlining to OB. */
4845
4846 static void
4847 ipa_write_indirect_edge_info (struct output_block *ob,
4848 struct cgraph_edge *cs)
4849 {
4850 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4851 struct bitpack_d bp;
4852
4853 streamer_write_hwi (ob, ii->param_index);
4854 bp = bitpack_create (ob->main_stream);
4855 bp_pack_value (&bp, ii->polymorphic, 1);
4856 bp_pack_value (&bp, ii->agg_contents, 1);
4857 bp_pack_value (&bp, ii->member_ptr, 1);
4858 bp_pack_value (&bp, ii->by_ref, 1);
4859 bp_pack_value (&bp, ii->vptr_changed, 1);
4860 streamer_write_bitpack (&bp);
4861 if (ii->agg_contents || ii->polymorphic)
4862 streamer_write_hwi (ob, ii->offset);
4863 else
4864 gcc_assert (ii->offset == 0);
4865
4866 if (ii->polymorphic)
4867 {
4868 streamer_write_hwi (ob, ii->otr_token);
4869 stream_write_tree (ob, ii->otr_type, true);
4870 ii->context.stream_out (ob);
4871 }
4872 }
4873
4874 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4875 relevant to indirect inlining from IB. */
4876
4877 static void
4878 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4879 struct data_in *data_in,
4880 struct cgraph_edge *cs)
4881 {
4882 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4883 struct bitpack_d bp;
4884
4885 ii->param_index = (int) streamer_read_hwi (ib);
4886 bp = streamer_read_bitpack (ib);
4887 ii->polymorphic = bp_unpack_value (&bp, 1);
4888 ii->agg_contents = bp_unpack_value (&bp, 1);
4889 ii->member_ptr = bp_unpack_value (&bp, 1);
4890 ii->by_ref = bp_unpack_value (&bp, 1);
4891 ii->vptr_changed = bp_unpack_value (&bp, 1);
4892 if (ii->agg_contents || ii->polymorphic)
4893 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4894 else
4895 ii->offset = 0;
4896 if (ii->polymorphic)
4897 {
4898 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4899 ii->otr_type = stream_read_tree (ib, data_in);
4900 ii->context.stream_in (ib, data_in);
4901 }
4902 }
4903
4904 /* Stream out NODE info to OB. */
4905
4906 static void
4907 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4908 {
4909 int node_ref;
4910 lto_symtab_encoder_t encoder;
4911 struct ipa_node_params *info = IPA_NODE_REF (node);
4912 int j;
4913 struct cgraph_edge *e;
4914 struct bitpack_d bp;
4915
4916 encoder = ob->decl_state->symtab_node_encoder;
4917 node_ref = lto_symtab_encoder_encode (encoder, node);
4918 streamer_write_uhwi (ob, node_ref);
4919
4920 streamer_write_uhwi (ob, ipa_get_param_count (info));
4921 for (j = 0; j < ipa_get_param_count (info); j++)
4922 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4923 bp = bitpack_create (ob->main_stream);
4924 gcc_assert (info->analysis_done
4925 || ipa_get_param_count (info) == 0);
4926 gcc_assert (!info->node_enqueued);
4927 gcc_assert (!info->ipcp_orig_node);
4928 for (j = 0; j < ipa_get_param_count (info); j++)
4929 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4930 streamer_write_bitpack (&bp);
4931 for (j = 0; j < ipa_get_param_count (info); j++)
4932 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4933 for (e = node->callees; e; e = e->next_callee)
4934 {
4935 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4936
4937 streamer_write_uhwi (ob,
4938 ipa_get_cs_argument_count (args) * 2
4939 + (args->polymorphic_call_contexts != NULL));
4940 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4941 {
4942 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4943 if (args->polymorphic_call_contexts != NULL)
4944 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4945 }
4946 }
4947 for (e = node->indirect_calls; e; e = e->next_callee)
4948 {
4949 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4950
4951 streamer_write_uhwi (ob,
4952 ipa_get_cs_argument_count (args) * 2
4953 + (args->polymorphic_call_contexts != NULL));
4954 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4955 {
4956 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4957 if (args->polymorphic_call_contexts != NULL)
4958 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4959 }
4960 ipa_write_indirect_edge_info (ob, e);
4961 }
4962 }
4963
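/* Note on the encoding above: each edge's argument count is streamed
   as COUNT * 2 + (polymorphic contexts present), which is why the
   reader below recovers both values with

     bool contexts_computed = count & 1;
     count /= 2;

   instead of streaming a separate flag.  */
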
4964 /* Stream in NODE info from IB. */
4965
4966 static void
4967 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4968 struct data_in *data_in)
4969 {
4970 struct ipa_node_params *info = IPA_NODE_REF (node);
4971 int k;
4972 struct cgraph_edge *e;
4973 struct bitpack_d bp;
4974
4975 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4976
4977 for (k = 0; k < ipa_get_param_count (info); k++)
4978 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4979
4980 bp = streamer_read_bitpack (ib);
4981 if (ipa_get_param_count (info) != 0)
4982 info->analysis_done = true;
4983 info->node_enqueued = false;
4984 for (k = 0; k < ipa_get_param_count (info); k++)
4985 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4986 for (k = 0; k < ipa_get_param_count (info); k++)
4987 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4988 for (e = node->callees; e; e = e->next_callee)
4989 {
4990 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4991 int count = streamer_read_uhwi (ib);
4992 bool contexts_computed = count & 1;
4993 count /= 2;
4994
4995 if (!count)
4996 continue;
4997 vec_safe_grow_cleared (args->jump_functions, count);
4998 if (contexts_computed)
4999 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5000
5001 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5002 {
5003 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5004 data_in);
5005 if (contexts_computed)
5006 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5007 }
5008 }
5009 for (e = node->indirect_calls; e; e = e->next_callee)
5010 {
5011 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5012 int count = streamer_read_uhwi (ib);
5013 bool contexts_computed = count & 1;
5014 count /= 2;
5015
5016 if (count)
5017 {
5018 vec_safe_grow_cleared (args->jump_functions, count);
5019 if (contexts_computed)
5020 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5021 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5022 {
5023 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5024 data_in);
5025 if (contexts_computed)
5026 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5027 }
5028 }
5029 ipa_read_indirect_edge_info (ib, data_in, e);
5030 }
5031 }
5032
5033 /* Write jump functions of all functions in the current partition. */
5034
5035 void
5036 ipa_prop_write_jump_functions (void)
5037 {
5038 struct cgraph_node *node;
5039 struct output_block *ob;
5040 unsigned int count = 0;
5041 lto_symtab_encoder_iterator lsei;
5042 lto_symtab_encoder_t encoder;
5043
5045 if (!ipa_node_params_vector.exists ())
5046 return;
5047
5048 ob = create_output_block (LTO_section_jump_functions);
5049 encoder = ob->decl_state->symtab_node_encoder;
5050 ob->symbol = NULL;
5051 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5052 lsei_next_function_in_partition (&lsei))
5053 {
5054 node = lsei_cgraph_node (lsei);
5055 if (node->has_gimple_body_p ()
5056 && IPA_NODE_REF (node) != NULL)
5057 count++;
5058 }
5059
5060 streamer_write_uhwi (ob, count);
5061
5062 /* Process all of the functions. */
5063 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5064 lsei_next_function_in_partition (&lsei))
5065 {
5066 node = lsei_cgraph_node (lsei);
5067 if (node->has_gimple_body_p ()
5068 && IPA_NODE_REF (node) != NULL)
5069 ipa_write_node_info (ob, node);
5070 }
5071 streamer_write_char_stream (ob->main_stream, 0);
5072 produce_asm (ob, NULL);
5073 destroy_output_block (ob);
5074 }
5075
5076 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
5077
5078 static void
5079 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5080 size_t len)
5081 {
5082 const struct lto_function_header *header =
5083 (const struct lto_function_header *) data;
5084 const int cfg_offset = sizeof (struct lto_function_header);
5085 const int main_offset = cfg_offset + header->cfg_size;
5086 const int string_offset = main_offset + header->main_size;
5087 struct data_in *data_in;
5088 unsigned int i;
5089 unsigned int count;
5090
5091 lto_input_block ib_main ((const char *) data + main_offset,
5092 header->main_size);
5093
5094 data_in =
5095 lto_data_in_create (file_data, (const char *) data + string_offset,
5096 header->string_size, vNULL);
5097 count = streamer_read_uhwi (&ib_main);
5098
5099 for (i = 0; i < count; i++)
5100 {
5101 unsigned int index;
5102 struct cgraph_node *node;
5103 lto_symtab_encoder_t encoder;
5104
5105 index = streamer_read_uhwi (&ib_main);
5106 encoder = file_data->symtab_node_encoder;
5107 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5108 index));
5109 gcc_assert (node->definition);
5110 ipa_read_node_info (&ib_main, node, data_in);
5111 }
5112 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5113 len);
5114 lto_data_in_delete (data_in);
5115 }
5116
5117 /* Read ipcp jump functions. */
5118
5119 void
5120 ipa_prop_read_jump_functions (void)
5121 {
5122 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5123 struct lto_file_decl_data *file_data;
5124 unsigned int j = 0;
5125
5126 ipa_check_create_node_params ();
5127 ipa_check_create_edge_args ();
5128 ipa_register_cgraph_hooks ();
5129
5130 while ((file_data = file_data_vec[j++]))
5131 {
5132 size_t len;
5133 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5134
5135 if (data)
5136 ipa_prop_read_section (file_data, data, len);
5137 }
5138 }
5139
5140 /* After merging units we can get a mismatch in argument counts.
5141 Decl merging might also have rendered parameter lists obsolete.
5142 Make sure the parameter and argument info structures exist. */
5143
5144 void
5145 ipa_update_after_lto_read (void)
5146 {
5147 ipa_check_create_node_params ();
5148 ipa_check_create_edge_args ();
5149 }
5150
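/* Stream out the aggregate value replacement chain for NODE to OB.  */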
5151 void
5152 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5153 {
5154 int node_ref;
5155 unsigned int count = 0;
5156 lto_symtab_encoder_t encoder;
5157 struct ipa_agg_replacement_value *aggvals, *av;
5158
5159 aggvals = ipa_get_agg_replacements_for_node (node);
5160 encoder = ob->decl_state->symtab_node_encoder;
5161 node_ref = lto_symtab_encoder_encode (encoder, node);
5162 streamer_write_uhwi (ob, node_ref);
5163
5164 for (av = aggvals; av; av = av->next)
5165 count++;
5166 streamer_write_uhwi (ob, count);
5167
5168 for (av = aggvals; av; av = av->next)
5169 {
5170 struct bitpack_d bp;
5171
5172 streamer_write_uhwi (ob, av->offset);
5173 streamer_write_uhwi (ob, av->index);
5174 stream_write_tree (ob, av->value, true);
5175
5176 bp = bitpack_create (ob->main_stream);
5177 bp_pack_value (&bp, av->by_ref, 1);
5178 streamer_write_bitpack (&bp);
5179 }
5180 }
5181
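/* Wire-format summary (mirrors the writer above): a node reference, a
   value count, then for each value: uhwi offset, uhwi index, tree
   value and a one-bit by_ref bitpack.  */
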
5182 /* Stream in the aggregate value replacement chain for NODE from IB. */
5183
5184 static void
5185 read_agg_replacement_chain (struct lto_input_block *ib,
5186 struct cgraph_node *node,
5187 struct data_in *data_in)
5188 {
5189 struct ipa_agg_replacement_value *aggvals = NULL;
5190 unsigned int count, i;
5191
5192 count = streamer_read_uhwi (ib);
5193 for (i = 0; i < count; i++)
5194 {
5195 struct ipa_agg_replacement_value *av;
5196 struct bitpack_d bp;
5197
5198 av = ggc_alloc<ipa_agg_replacement_value> ();
5199 av->offset = streamer_read_uhwi (ib);
5200 av->index = streamer_read_uhwi (ib);
5201 av->value = stream_read_tree (ib, data_in);
5202 bp = streamer_read_bitpack (ib);
5203 av->by_ref = bp_unpack_value (&bp, 1);
5204 av->next = aggvals;
5205 aggvals = av;
5206 }
5207 ipa_set_node_agg_value_chain (node, aggvals);
5208 }
5209
5210 /* Write aggregate replacement chains for all nodes in the current partition. */
5211
5212 void
5213 ipa_prop_write_all_agg_replacement (void)
5214 {
5215 struct cgraph_node *node;
5216 struct output_block *ob;
5217 unsigned int count = 0;
5218 lto_symtab_encoder_iterator lsei;
5219 lto_symtab_encoder_t encoder;
5220
5221 if (!ipa_node_agg_replacements)
5222 return;
5223
5224 ob = create_output_block (LTO_section_ipcp_transform);
5225 encoder = ob->decl_state->symtab_node_encoder;
5226 ob->symbol = NULL;
5227 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5228 lsei_next_function_in_partition (&lsei))
5229 {
5230 node = lsei_cgraph_node (lsei);
5231 if (node->has_gimple_body_p ()
5232 && ipa_get_agg_replacements_for_node (node) != NULL)
5233 count++;
5234 }
5235
5236 streamer_write_uhwi (ob, count);
5237
5238 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5239 lsei_next_function_in_partition (&lsei))
5240 {
5241 node = lsei_cgraph_node (lsei);
5242 if (node->has_gimple_body_p ()
5243 && ipa_get_agg_replacements_for_node (node) != NULL)
5244 write_agg_replacement_chain (ob, node);
5245 }
5246 streamer_write_char_stream (ob->main_stream, 0);
5247 produce_asm (ob, NULL);
5248 destroy_output_block (ob);
5249 }
5250
5251 /* Read replacements section in file FILE_DATA of length LEN with data
5252 DATA. */
5253
5254 static void
5255 read_replacements_section (struct lto_file_decl_data *file_data,
5256 const char *data,
5257 size_t len)
5258 {
5259 const struct lto_function_header *header =
5260 (const struct lto_function_header *) data;
5261 const int cfg_offset = sizeof (struct lto_function_header);
5262 const int main_offset = cfg_offset + header->cfg_size;
5263 const int string_offset = main_offset + header->main_size;
5264 struct data_in *data_in;
5265 unsigned int i;
5266 unsigned int count;
5267
5268 lto_input_block ib_main ((const char *) data + main_offset,
5269 header->main_size);
5270
5271 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5272 header->string_size, vNULL);
5273 count = streamer_read_uhwi (&ib_main);
5274
5275 for (i = 0; i < count; i++)
5276 {
5277 unsigned int index;
5278 struct cgraph_node *node;
5279 lto_symtab_encoder_t encoder;
5280
5281 index = streamer_read_uhwi (&ib_main);
5282 encoder = file_data->symtab_node_encoder;
5283 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5284 index));
5285 gcc_assert (node->definition);
5286 read_agg_replacement_chain (&ib_main, node, data_in);
5287 }
5288 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5289 len);
5290 lto_data_in_delete (data_in);
5291 }
5292
5293 /* Read IPA-CP aggregate replacements. */
5294
5295 void
5296 ipa_prop_read_all_agg_replacement (void)
5297 {
5298 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5299 struct lto_file_decl_data *file_data;
5300 unsigned int j = 0;
5301
5302 while ((file_data = file_data_vec[j++]))
5303 {
5304 size_t len;
5305 const char *data = lto_get_section_data (file_data,
5306 LTO_section_ipcp_transform,
5307 NULL, &len);
5308 if (data)
5309 read_replacements_section (file_data, data, len);
5310 }
5311 }
5312
5313 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5314 NODE. */
5315
5316 static void
5317 adjust_agg_replacement_values (struct cgraph_node *node,
5318 struct ipa_agg_replacement_value *aggval)
5319 {
5320 struct ipa_agg_replacement_value *v;
5321 int i, c = 0, d = 0, *adj;
5322
5323 if (!node->clone.combined_args_to_skip)
5324 return;
5325
5326 for (v = aggval; v; v = v->next)
5327 {
5328 gcc_assert (v->index >= 0);
5329 if (c < v->index)
5330 c = v->index;
5331 }
5332 c++;
5333
5334 adj = XALLOCAVEC (int, c);
5335 for (i = 0; i < c; i++)
5336 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5337 {
5338 adj[i] = -1;
5339 d++;
5340 }
5341 else
5342 adj[i] = i - d;
5343
5344 for (v = aggval; v; v = v->next)
5345 v->index = adj[v->index];
5346 }
5347
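/* Example: if the clone skips original parameter 1 of three, the
   remapping array built above is adj = { 0, -1, 1 }, so an aggregate
   value recorded for original parameter 2 is renumbered to index 1
   while values for parameter 0 keep their index.  */
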
5348 /* Dominator walker driving the ipcp modification phase. */
5349
5350 class ipcp_modif_dom_walker : public dom_walker
5351 {
5352 public:
5353 ipcp_modif_dom_walker (struct func_body_info *fbi,
5354 vec<ipa_param_descriptor> descs,
5355 struct ipa_agg_replacement_value *av,
5356 bool *sc, bool *cc)
5357 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5358 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5359
5360 virtual void before_dom_children (basic_block);
5361
5362 private:
5363 struct func_body_info *m_fbi;
5364 vec<ipa_param_descriptor> m_descriptors;
5365 struct ipa_agg_replacement_value *m_aggval;
5366 bool *m_something_changed, *m_cfg_changed;
5367 };
5368
5369 void
5370 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5371 {
5372 gimple_stmt_iterator gsi;
5373 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5374 {
5375 struct ipa_agg_replacement_value *v;
5376 gimple stmt = gsi_stmt (gsi);
5377 tree rhs, val, t;
5378 HOST_WIDE_INT offset, size;
5379 int index;
5380 bool by_ref, vce;
5381
5382 if (!gimple_assign_load_p (stmt))
5383 continue;
5384 rhs = gimple_assign_rhs1 (stmt);
5385 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5386 continue;
5387
5388 vce = false;
5389 t = rhs;
5390 while (handled_component_p (t))
5391 {
5392 /* V_C_E can do things like convert an array of integers to one
5393 bigger integer and similar things we do not handle below. */
5394 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5395 {
5396 vce = true;
5397 break;
5398 }
5399 t = TREE_OPERAND (t, 0);
5400 }
5401 if (vce)
5402 continue;
5403
5404 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5405 &offset, &size, &by_ref))
5406 continue;
5407 for (v = m_aggval; v; v = v->next)
5408 if (v->index == index
5409 && v->offset == offset)
5410 break;
5411 if (!v
5412 || v->by_ref != by_ref
5413 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5414 continue;
5415
5416 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5417 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5418 {
5419 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5420 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5421 else if (TYPE_SIZE (TREE_TYPE (rhs))
5422 == TYPE_SIZE (TREE_TYPE (v->value)))
5423 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5424 else
5425 {
5426 if (dump_file)
5427 {
5428 fprintf (dump_file, " const ");
5429 print_generic_expr (dump_file, v->value, 0);
5430 fprintf (dump_file, " can't be converted to type of ");
5431 print_generic_expr (dump_file, rhs, 0);
5432 fprintf (dump_file, "\n");
5433 }
5434 continue;
5435 }
5436 }
5437 else
5438 val = v->value;
5439
5440 if (dump_file && (dump_flags & TDF_DETAILS))
5441 {
5442 fprintf (dump_file, "Modifying stmt:\n ");
5443 print_gimple_stmt (dump_file, stmt, 0, 0);
5444 }
5445 gimple_assign_set_rhs_from_tree (&gsi, val);
5446 update_stmt (stmt);
5447
5448 if (dump_file && (dump_flags & TDF_DETAILS))
5449 {
5450 fprintf (dump_file, "into:\n ");
5451 print_gimple_stmt (dump_file, stmt, 0, 0);
5452 fprintf (dump_file, "\n");
5453 }
5454
5455 *m_something_changed = true;
5456 if (maybe_clean_eh_stmt (stmt)
5457 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5458 *m_cfg_changed = true;
5459 }
5461 }
5462
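/* Example of the rewrite performed above (hypothetical values): given
   an aggregate replacement { index 0, offset 8 * BITS_PER_UNIT,
   value 42 } for a by-reference parameter P, a load such as

     x_1 = P->f;      (with field f at byte offset 8)

   is replaced by

     x_1 = 42;

   after which dead EH edges are purged if the load could have thrown.  */
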
5463 /* IPCP transformation phase doing propagation of aggregate values. */
5464
5465 unsigned int
5466 ipcp_transform_function (struct cgraph_node *node)
5467 {
5468 vec<ipa_param_descriptor> descriptors = vNULL;
5469 struct func_body_info fbi;
5470 struct ipa_agg_replacement_value *aggval;
5471 int param_count;
5472 bool cfg_changed = false, something_changed = false;
5473
5474 gcc_checking_assert (cfun);
5475 gcc_checking_assert (current_function_decl);
5476
5477 if (dump_file)
5478 fprintf (dump_file, "Modification phase of node %s/%i\n",
5479 node->name (), node->order);
5480
5481 aggval = ipa_get_agg_replacements_for_node (node);
5482 if (!aggval)
5483 return 0;
5484 param_count = count_formal_params (node->decl);
5485 if (param_count == 0)
5486 return 0;
5487 adjust_agg_replacement_values (node, aggval);
5488 if (dump_file)
5489 ipa_dump_agg_replacement_values (dump_file, aggval);
5490
5491 fbi.node = node;
5492 fbi.info = NULL;
5493 fbi.bb_infos = vNULL;
5494 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5495 fbi.param_count = param_count;
5496 fbi.aa_walked = 0;
5497
5498 descriptors.safe_grow_cleared (param_count);
5499 ipa_populate_param_decls (node, descriptors);
5500 calculate_dominance_info (CDI_DOMINATORS);
5501 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5502 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5503
5504 int i;
5505 struct ipa_bb_info *bi;
5506 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5507 free_ipa_bb_info (bi);
5508 fbi.bb_infos.release ();
5509 free_dominance_info (CDI_DOMINATORS);
5510 (*ipa_node_agg_replacements)[node->uid] = NULL;
5511 descriptors.release ();
5512
5513 if (!something_changed)
5514 return 0;
5515 else if (cfg_changed)
5516 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5517 else
5518 return TODO_update_ssa_only_virtuals;
5519 }