ipa-prop.c (detect_type_change): Short-circuit testing of type changes on THIS pointer.
gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
63
64 struct param_analysis_info
65 {
66 bool parm_modified, ref_modified, pt_modified;
67 bitmap parm_visited_statements, pt_visited_statements;
68 };
69
70 /* Vector where the parameter infos are actually stored. */
71 vec<ipa_node_params> ipa_node_params_vector;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
74 /* Vector where the edge argument infos are actually stored. */
75 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
76
77 /* Holders of ipa cgraph hooks: */
78 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
79 static struct cgraph_node_hook_list *node_removal_hook_holder;
80 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
81 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
82 static struct cgraph_node_hook_list *function_insertion_hook_holder;
83
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
86 {
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge *cs;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc *next_duplicate;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
92 is out of control. */
93 int refcount;
94 };
95
96 /* Allocation pool for reference descriptions. */
97
98 static alloc_pool ipa_refdesc_pool;
99
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
102
103 static bool
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
105 {
106 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
107 struct cl_optimization *os;
108
109 if (!fs_opts)
110 return false;
111 os = TREE_OPTIMIZATION (fs_opts);
112 return !os->x_optimize || !os->x_flag_ipa_cp;
113 }
114
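/* As a brief illustration of the predicate above (a sketch; the function name
   is invented): a function such as

     __attribute__ ((optimize ("O0")))
     void hands_off (int i);

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is
   zero, so the predicate returns true and IPA-CP leaves the function alone.  */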
115 /* Return index of the formal parameter whose tree is PTREE, looked up in the
116 vector of parameter descriptors DESCRIPTORS. Return -1 if it is not found. */
117
118 static int
119 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
120 {
121 int i, count;
122
123 count = descriptors.length ();
124 for (i = 0; i < count; i++)
125 if (descriptors[i].decl == ptree)
126 return i;
127
128 return -1;
129 }
130
131 /* Return index of the formal whose tree is PTREE in function which corresponds
132 to INFO. */
133
134 int
135 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
136 {
137 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
138 }
139
140 /* Populate the decl field of each parameter descriptor in DESCRIPTORS that
141 correspond to NODE. */
142
143 static void
144 ipa_populate_param_decls (struct cgraph_node *node,
145 vec<ipa_param_descriptor> &descriptors)
146 {
147 tree fndecl;
148 tree fnargs;
149 tree parm;
150 int param_num;
151
152 fndecl = node->decl;
153 gcc_assert (gimple_has_body_p (fndecl));
154 fnargs = DECL_ARGUMENTS (fndecl);
155 param_num = 0;
156 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
157 {
158 descriptors[param_num].decl = parm;
159 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
160 param_num++;
161 }
162 }
163
164 /* Return how many formal parameters FNDECL has. */
165
166 static inline int
167 count_formal_params (tree fndecl)
168 {
169 tree parm;
170 int count = 0;
171 gcc_assert (gimple_has_body_p (fndecl));
172
173 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
174 count++;
175
176 return count;
177 }
178
179 /* Dump a printable representation of the Ith formal parameter of the function
180 corresponding to INFO to FILE. Note there is no setter function as the
181 descriptor array is built just once using ipa_initialize_node_params. */
182
183 void
184 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
185 {
186 fprintf (file, "param #%i", i);
187 if (info->descriptors[i].decl)
188 {
189 fprintf (file, " ");
190 print_generic_expr (file, info->descriptors[i].decl, 0);
191 }
192 }
193
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
196
197 void
198 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
199 {
200 struct ipa_node_params *info = IPA_NODE_REF (node);
201
202 if (!info->descriptors.exists () && param_count)
203 info->descriptors.safe_grow_cleared (param_count);
204 }
205
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
208 param_decls. */
209
210 void
211 ipa_initialize_node_params (struct cgraph_node *node)
212 {
213 struct ipa_node_params *info = IPA_NODE_REF (node);
214
215 if (!info->descriptors.exists ())
216 {
217 ipa_alloc_node_params (node, count_formal_params (node->decl));
218 ipa_populate_param_decls (node, info->descriptors);
219 }
220 }
221
222 /* Print the jump functions associated with call graph edge CS to file F. */
223
224 static void
225 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
226 {
227 int i, count;
228
229 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
230 for (i = 0; i < count; i++)
231 {
232 struct ipa_jump_func *jump_func;
233 enum jump_func_type type;
234
235 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
236 type = jump_func->type;
237
238 fprintf (f, " param %d: ", i);
239 if (type == IPA_JF_UNKNOWN)
240 fprintf (f, "UNKNOWN\n");
241 else if (type == IPA_JF_KNOWN_TYPE)
242 {
243 fprintf (f, "KNOWN TYPE: base ");
244 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
245 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
246 jump_func->value.known_type.offset);
247 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
248 fprintf (f, "\n");
249 }
250 else if (type == IPA_JF_CONST)
251 {
252 tree val = jump_func->value.constant.value;
253 fprintf (f, "CONST: ");
254 print_generic_expr (f, val, 0);
255 if (TREE_CODE (val) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
257 {
258 fprintf (f, " -> ");
259 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
260 0);
261 }
262 fprintf (f, "\n");
263 }
264 else if (type == IPA_JF_PASS_THROUGH)
265 {
266 fprintf (f, "PASS THROUGH: ");
267 fprintf (f, "%d, op %s",
268 jump_func->value.pass_through.formal_id,
269 get_tree_code_name(jump_func->value.pass_through.operation));
270 if (jump_func->value.pass_through.operation != NOP_EXPR)
271 {
272 fprintf (f, " ");
273 print_generic_expr (f,
274 jump_func->value.pass_through.operand, 0);
275 }
276 if (jump_func->value.pass_through.agg_preserved)
277 fprintf (f, ", agg_preserved");
278 if (jump_func->value.pass_through.type_preserved)
279 fprintf (f, ", type_preserved");
280 fprintf (f, "\n");
281 }
282 else if (type == IPA_JF_ANCESTOR)
283 {
284 fprintf (f, "ANCESTOR: ");
285 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
286 jump_func->value.ancestor.formal_id,
287 jump_func->value.ancestor.offset);
288 print_generic_expr (f, jump_func->value.ancestor.type, 0);
289 if (jump_func->value.ancestor.agg_preserved)
290 fprintf (f, ", agg_preserved");
291 if (jump_func->value.ancestor.type_preserved)
292 fprintf (f, ", type_preserved");
293 fprintf (f, "\n");
294 }
295
296 if (jump_func->agg.items)
297 {
298 struct ipa_agg_jf_item *item;
299 int j;
300
301 fprintf (f, " Aggregate passed by %s:\n",
302 jump_func->agg.by_ref ? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
304 {
305 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
306 item->offset);
307 if (TYPE_P (item->value))
308 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
309 tree_to_uhwi (TYPE_SIZE (item->value)));
310 else
311 {
312 fprintf (f, "cst: ");
313 print_generic_expr (f, item->value, 0);
314 }
315 fprintf (f, "\n");
316 }
317 }
318 }
319 }
320
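/* The dumper above produces output along the lines of

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 4

   (a sketch assembled from the format strings above, not captured from an
   actual dump).  */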
321
322 /* Print the jump functions of all arguments on all call graph edges going from
323 NODE to file F. */
324
325 void
326 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
327 {
328 struct cgraph_edge *cs;
329
330 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
331 node->order);
332 for (cs = node->callees; cs; cs = cs->next_callee)
333 {
334 if (!ipa_edge_args_info_available_for_edge_p (cs))
335 continue;
336
337 fprintf (f, " callsite %s/%i -> %s/%i : \n",
338 xstrdup (node->name ()), node->order,
339 xstrdup (cs->callee->name ()),
340 cs->callee->order);
341 ipa_print_node_jump_functions_for_edge (f, cs);
342 }
343
344 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
345 {
346 struct cgraph_indirect_call_info *ii;
347 if (!ipa_edge_args_info_available_for_edge_p (cs))
348 continue;
349
350 ii = cs->indirect_info;
351 if (ii->agg_contents)
352 fprintf (f, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
354 ii->member_ptr ? "member ptr" : "aggregate",
355 ii->param_index, ii->offset,
356 ii->by_ref ? "by reference" : "by_value");
357 else
358 fprintf (f, " indirect %s callsite, calling param %i, "
359 "offset " HOST_WIDE_INT_PRINT_DEC,
360 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
361 ii->offset);
362
363 if (cs->call_stmt)
364 {
365 fprintf (f, ", for stmt ");
366 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
367 }
368 else
369 fprintf (f, "\n");
370 ipa_print_node_jump_functions_for_edge (f, cs);
371 }
372 }
373
374 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
375
376 void
377 ipa_print_all_jump_functions (FILE *f)
378 {
379 struct cgraph_node *node;
380
381 fprintf (f, "\nJump functions:\n");
382 FOR_EACH_FUNCTION (node)
383 {
384 ipa_print_node_jump_functions (f, node);
385 }
386 }
387
388 /* Set JFUNC to be a known type jump function. */
389
390 static void
391 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
392 tree base_type, tree component_type)
393 {
394 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
395 && TYPE_BINFO (component_type));
396 jfunc->type = IPA_JF_KNOWN_TYPE;
397 jfunc->value.known_type.offset = offset;
398 jfunc->value.known_type.base_type = base_type;
399 jfunc->value.known_type.component_type = component_type;
400 gcc_assert (component_type);
401 }
402
403 /* Set DST to be a copy of the jump function SRC (to be used by the jump
404 function combination code). The two functions will share their rdesc. */
405
406 static void
407 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
408 struct ipa_jump_func *src)
409
410 {
411 gcc_checking_assert (src->type == IPA_JF_CONST);
412 dst->type = IPA_JF_CONST;
413 dst->value.constant = src->value.constant;
414 }
415
416 /* Set JFUNC to be a constant jump function. */
417
418 static void
419 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
420 struct cgraph_edge *cs)
421 {
422 constant = unshare_expr (constant);
423 if (constant && EXPR_P (constant))
424 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
425 jfunc->type = IPA_JF_CONST;
426 jfunc->value.constant.value = unshare_expr_without_location (constant);
427
428 if (TREE_CODE (constant) == ADDR_EXPR
429 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
430 {
431 struct ipa_cst_ref_desc *rdesc;
432 if (!ipa_refdesc_pool)
433 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
434 sizeof (struct ipa_cst_ref_desc), 32);
435
436 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
437 rdesc->cs = cs;
438 rdesc->next_duplicate = NULL;
439 rdesc->refcount = 1;
440 jfunc->value.constant.rdesc = rdesc;
441 }
442 else
443 jfunc->value.constant.rdesc = NULL;
444 }
445
446 /* Set JFUNC to be a simple pass-through jump function. */
447 static void
448 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
449 bool agg_preserved, bool type_preserved)
450 {
451 jfunc->type = IPA_JF_PASS_THROUGH;
452 jfunc->value.pass_through.operand = NULL_TREE;
453 jfunc->value.pass_through.formal_id = formal_id;
454 jfunc->value.pass_through.operation = NOP_EXPR;
455 jfunc->value.pass_through.agg_preserved = agg_preserved;
456 jfunc->value.pass_through.type_preserved = type_preserved;
457 }
458
459 /* Set JFUNC to be an arithmetic pass through jump function. */
460
461 static void
462 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
463 tree operand, enum tree_code operation)
464 {
465 jfunc->type = IPA_JF_PASS_THROUGH;
466 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
467 jfunc->value.pass_through.formal_id = formal_id;
468 jfunc->value.pass_through.operation = operation;
469 jfunc->value.pass_through.agg_preserved = false;
470 jfunc->value.pass_through.type_preserved = false;
471 }
472
473 /* Set JFUNC to be an ancestor jump function. */
474
475 static void
476 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
477 tree type, int formal_id, bool agg_preserved,
478 bool type_preserved)
479 {
480 jfunc->type = IPA_JF_ANCESTOR;
481 jfunc->value.ancestor.formal_id = formal_id;
482 jfunc->value.ancestor.offset = offset;
483 jfunc->value.ancestor.type = type;
484 jfunc->value.ancestor.agg_preserved = agg_preserved;
485 jfunc->value.ancestor.type_preserved = type_preserved;
486 }
487
488 /* Extract the actual BINFO being described by JFUNC, which must be a known
489 type jump function. */
490
491 tree
492 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
493 {
494 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
495 if (!base_binfo)
496 return NULL_TREE;
497 return get_binfo_at_offset (base_binfo,
498 jfunc->value.known_type.offset,
499 jfunc->value.known_type.component_type);
500 }
501
502 /* Structure to be passed in between detect_type_change and
503 check_stmt_for_type_change. */
504
505 struct type_change_info
506 {
507 /* Offset into the object where there is the virtual method pointer we are
508 looking for. */
509 HOST_WIDE_INT offset;
510 /* The declaration or SSA_NAME pointer of the base that we are checking for
511 type change. */
512 tree object;
513 /* If we actually can tell the type that the object has changed to, it is
514 stored in this field. Otherwise it remains NULL_TREE. */
515 tree known_current_type;
516 /* Set to true if dynamic type change has been detected. */
517 bool type_maybe_changed;
518 /* Set to true if multiple types have been encountered. known_current_type
519 must be disregarded in that case. */
520 bool multiple_types_encountered;
521 };
522
523 /* Return true if STMT can modify a virtual method table pointer.
524
525 This function makes special assumptions about both constructors and
526 destructors, which are the only functions that are allowed to alter the VMT
527 pointers. It assumes that destructors begin with assignment into all VMT
528 pointers and that constructors essentially look in the following way:
529
530 1) The very first thing they do is that they call constructors of ancestor
531 sub-objects that have them.
532
533 2) Then the VMT pointers of this and all its ancestors are set to new
534 values corresponding to the type of the constructor.
535
536 3) Only afterwards, other stuff such as constructors of member sub-objects
537 and the code written by the user is run. Only this may include calling
538 virtual functions, directly or indirectly.
539
540 There is no way to call a constructor of an ancestor sub-object in any
541 other way.
542
543 This means that we do not have to care whether constructors get the correct
544 type information because they will always change it (in fact, if we define
545 the type to be given by the VMT pointer, it is undefined).
546
547 The most important fact to derive from the above is that if, for some
548 statement in section 3, we try to detect whether the dynamic type has
549 changed, we can safely ignore all calls as we examine the function body
550 backwards until we reach statements in section 2 because these calls cannot
551 be ancestor constructors or destructors (if the input is not bogus) and so
552 do not change the dynamic type (this holds true only for automatically
553 allocated objects but at the moment we devirtualize only these). We then
554 must detect that statements in section 2 change the dynamic type and can try
555 to derive the new type. That is enough and we can stop, we will never see
556 the calls into constructors of sub-objects in this code. Therefore we can
557 safely ignore all call statements that we traverse.
558 */
559
560 static bool
561 stmt_may_be_vtbl_ptr_store (gimple stmt)
562 {
563 if (is_gimple_call (stmt))
564 return false;
565 else if (gimple_clobber_p (stmt))
566 return false;
567 else if (is_gimple_assign (stmt))
568 {
569 tree lhs = gimple_assign_lhs (stmt);
570
571 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
572 {
573 if (flag_strict_aliasing
574 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
575 return false;
576
577 if (TREE_CODE (lhs) == COMPONENT_REF
578 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
579 return false;
580 /* In the future we might want to use get_base_ref_and_offset to find
581 if there is a field corresponding to the offset and if so, proceed
582 almost like if it was a component ref. */
583 }
584 }
585 return true;
586 }
587
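/* To make the constructor assumptions above concrete, consider this sketch
   (the class and function names are invented):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()                  //  1) A::A () runs first,
     {                        //  2) then the VMT pointer of *this is set
       do_something (this);   //     to B's vtable,
     }                        //  3) only then does user code run.

   Walking backwards from a statement in section 3, the walker may therefore
   skip over all calls until it reaches the vtable pointer stores made in
   section 2.  */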
588 /* If STMT can be proved to be an assignment to the virtual method table
589 pointer of the object described by TCI and the type associated with the new
590 table can be identified, return that type. Otherwise return NULL_TREE. */
591
592 static tree
593 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
594 {
595 HOST_WIDE_INT offset, size, max_size;
596 tree lhs, rhs, base, binfo;
597
598 if (!gimple_assign_single_p (stmt))
599 return NULL_TREE;
600
601 lhs = gimple_assign_lhs (stmt);
602 rhs = gimple_assign_rhs1 (stmt);
603 if (TREE_CODE (lhs) != COMPONENT_REF
604 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
605 return NULL_TREE;
606
607 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
608 if (offset != tci->offset
609 || size != POINTER_SIZE
610 || max_size != POINTER_SIZE)
611 return NULL_TREE;
612 if (TREE_CODE (base) == MEM_REF)
613 {
614 if (TREE_CODE (tci->object) != MEM_REF
615 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
616 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
617 TREE_OPERAND (base, 1)))
618 return NULL_TREE;
619 }
620 else if (tci->object != base)
621 return NULL_TREE;
622
623 binfo = vtable_pointer_value_to_binfo (rhs);
624
625 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
626 base of outer type. In this case we would need to either
627 work on binfos or translate it back to outer type and offset.
628 KNOWN_TYPE jump functions are not ready for that, yet. */
629 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
630 return NULL;
631
632 return BINFO_TYPE (binfo);
633 }
634
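/* In C++ front end dumps, the kind of store recognized above typically looks
   like

     this_2(D)->_vptr.B = &_ZTV1B[2];

   i.e. an assignment of a vtable address into a field that has DECL_VIRTUAL_P
   set (a sketch; the exact shape of the right-hand side varies).  */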
635 /* Callback of walk_aliased_vdefs and a helper function for
636 detect_type_change to check whether a particular statement may modify
637 the virtual table pointer, and if possible also determine the new type of
638 the (sub-)object. It stores its result into DATA, which points to a
639 type_change_info structure. */
640
641 static bool
642 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
643 {
644 gimple stmt = SSA_NAME_DEF_STMT (vdef);
645 struct type_change_info *tci = (struct type_change_info *) data;
646
647 if (stmt_may_be_vtbl_ptr_store (stmt))
648 {
649 tree type;
650 type = extr_type_from_vtbl_ptr_store (stmt, tci);
651 if (tci->type_maybe_changed
652 && type != tci->known_current_type)
653 tci->multiple_types_encountered = true;
654 tci->known_current_type = type;
655 tci->type_maybe_changed = true;
656 return true;
657 }
658 else
659 return false;
660 }
661
662
663
664 /* Detect whether the dynamic type of ARG, of type COMP_TYPE, has changed
665 (before callsite CALL) by looking for assignments to its virtual table
666 pointer. If it has, return true and fill in the jump function JFUNC with
667 relevant type information or set it to unknown. ARG is the object itself
668 (not a pointer to it, unless dereferenced). BASE is the base of the memory
669 access as returned by get_ref_base_and_extent, as is the offset. */
670
671 static bool
672 detect_type_change (tree arg, tree base, tree comp_type, gimple call,
673 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
674 {
675 struct type_change_info tci;
676 ao_ref ao;
677
678 gcc_checking_assert (DECL_P (arg)
679 || TREE_CODE (arg) == MEM_REF
680 || handled_component_p (arg));
681 /* Const calls cannot call virtual methods through VMT and so type changes do
682 not matter. */
683 if (!flag_devirtualize || !gimple_vuse (call)
684 /* Be sure expected_type is polymorphic. */
685 || !comp_type
686 || TREE_CODE (comp_type) != RECORD_TYPE
687 || !TYPE_BINFO (comp_type)
688 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
689 return false;
690
691 /* C++ methods are not allowed to change THIS pointer unless they
692 are constructors or destructors. */
693 if (TREE_CODE (base) == MEM_REF
694 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
695 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
696 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
697 && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
698 && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
699 && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
700 && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
701 == DECL_ARGUMENTS (current_function_decl)))
702 return false;
703
704 ao_ref_init (&ao, arg);
705 ao.base = base;
706 ao.offset = offset;
707 ao.size = POINTER_SIZE;
708 ao.max_size = ao.size;
709
710 tci.offset = offset;
711 tci.object = get_base_address (arg);
712 tci.known_current_type = NULL_TREE;
713 tci.type_maybe_changed = false;
714 tci.multiple_types_encountered = false;
715
716 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
717 &tci, NULL);
718 if (!tci.type_maybe_changed)
719 return false;
720
721 if (!tci.known_current_type
722 || tci.multiple_types_encountered
723 || offset != 0)
724 jfunc->type = IPA_JF_UNKNOWN;
725 else
726 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
727
728 return true;
729 }
730
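/* The THIS short-circuit above fires for ordinary method bodies such as

     void A::foo () { this->bar (); }

   where the base of the access is a MEM_REF of the default definition of the
   first PARM_DECL. Since only constructors and destructors may change the
   dynamic type of *this, the walk over virtual operand definitions is skipped
   entirely for such bases (a sketch of the intent).  */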
731 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
732 SSA name (its dereference will become the base and the offset is assumed to
733 be zero). */
734
735 static bool
736 detect_type_change_ssa (tree arg, tree comp_type,
737 gimple call, struct ipa_jump_func *jfunc)
738 {
739 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
740 if (!flag_devirtualize
741 || !POINTER_TYPE_P (TREE_TYPE (arg)))
742 return false;
743
744 arg = build2 (MEM_REF, ptr_type_node, arg,
745 build_int_cst (ptr_type_node, 0));
746
747 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
748 }
749
750 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
751 boolean variable pointed to by DATA. */
752
753 static bool
754 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
755 void *data)
756 {
757 bool *b = (bool *) data;
758 *b = true;
759 return true;
760 }
761
762 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
763 a value known not to be modified in this function before reaching the
764 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
765 information about the parameter. */
766
767 static bool
768 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
769 gimple stmt, tree parm_load)
770 {
771 bool modified = false;
772 bitmap *visited_stmts;
773 ao_ref refd;
774
775 if (parm_ainfo && parm_ainfo->parm_modified)
776 return false;
777
778 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
779 ao_ref_init (&refd, parm_load);
780 /* We can cache visited statements only when parm_ainfo is available and when
781 we are looking at a naked load of the whole parameter. */
782 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
783 visited_stmts = NULL;
784 else
785 visited_stmts = &parm_ainfo->parm_visited_statements;
786 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
787 visited_stmts);
788 if (parm_ainfo && modified)
789 parm_ainfo->parm_modified = true;
790 return !modified;
791 }
792
793 /* If STMT is an assignment that loads a value from a parameter declaration,
794 return the index of the parameter in ipa_node_params which has not been
795 modified. Otherwise return -1. */
796
797 static int
798 load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
799 struct param_analysis_info *parms_ainfo,
800 gimple stmt)
801 {
802 int index;
803 tree op1;
804
805 if (!gimple_assign_single_p (stmt))
806 return -1;
807
808 op1 = gimple_assign_rhs1 (stmt);
809 if (TREE_CODE (op1) != PARM_DECL)
810 return -1;
811
812 index = ipa_get_param_decl_index_1 (descriptors, op1);
813 if (index < 0
814 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
815 : NULL, stmt, op1))
816 return -1;
817
818 return index;
819 }
820
821 /* Return true if memory reference REF loads data that are known to be
822 unmodified in this function before reaching statement STMT. PARM_AINFO, if
823 non-NULL, is a pointer to a structure containing temporary information about
824 PARM. */
825
826 static bool
827 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
828 gimple stmt, tree ref)
829 {
830 bool modified = false;
831 ao_ref refd;
832
833 gcc_checking_assert (gimple_vuse (stmt));
834 if (parm_ainfo && parm_ainfo->ref_modified)
835 return false;
836
837 ao_ref_init (&refd, ref);
838 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
839 NULL);
840 if (parm_ainfo && modified)
841 parm_ainfo->ref_modified = true;
842 return !modified;
843 }
844
845 /* Return true if the data pointed to by PARM is known to be unmodified in this
846 function before reaching call statement CALL into which it is passed.
847 PARM_AINFO is a pointer to a structure containing temporary information
848 about PARM. */
849
850 static bool
851 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
852 gimple call, tree parm)
853 {
854 bool modified = false;
855 ao_ref refd;
856
857 /* It's unnecessary to calculate anything about memory contents for a const
858 function because it is not going to use it. But do not cache the result
859 either. Also, no such calculations for non-pointers. */
860 if (!gimple_vuse (call)
861 || !POINTER_TYPE_P (TREE_TYPE (parm)))
862 return false;
863
864 if (parm_ainfo->pt_modified)
865 return false;
866
867 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
868 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
869 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
870 if (modified)
871 parm_ainfo->pt_modified = true;
872 return !modified;
873 }
874
875 /* Return true if we can prove that OP is a memory reference loading unmodified
876 data from an aggregate passed as a parameter and if the aggregate is passed
877 by reference, that the alias type of the load corresponds to the type of the
878 formal parameter (so that we can rely on this type for TBAA in callers).
879 INFO and PARMS_AINFO describe parameters of the current function (but the
880 latter can be NULL), STMT is the load statement. If function returns true,
881 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
882 within the aggregate and whether it is a load from a value passed by
883 reference respectively. */
884
885 static bool
886 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
887 struct param_analysis_info *parms_ainfo, gimple stmt,
888 tree op, int *index_p, HOST_WIDE_INT *offset_p,
889 HOST_WIDE_INT *size_p, bool *by_ref_p)
890 {
891 int index;
892 HOST_WIDE_INT size, max_size;
893 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
894
895 if (max_size == -1 || max_size != size || *offset_p < 0)
896 return false;
897
898 if (DECL_P (base))
899 {
900 int index = ipa_get_param_decl_index_1 (descriptors, base);
901 if (index >= 0
902 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
903 : NULL, stmt, op))
904 {
905 *index_p = index;
906 *by_ref_p = false;
907 if (size_p)
908 *size_p = size;
909 return true;
910 }
911 return false;
912 }
913
914 if (TREE_CODE (base) != MEM_REF
915 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
916 || !integer_zerop (TREE_OPERAND (base, 1)))
917 return false;
918
919 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
920 {
921 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
922 index = ipa_get_param_decl_index_1 (descriptors, parm);
923 }
924 else
925 {
926 /* This branch catches situations where a pointer parameter is not a
927 gimple register, for example:
928
929 void hip7(S*) (struct S * p)
930 {
931 void (*<T2e4>) (struct S *) D.1867;
932 struct S * p.1;
933
934 <bb 2>:
935 p.1_1 = p;
936 D.1867_2 = p.1_1->f;
937 D.1867_2 ();
938 gdp = &p;
939 */
940
941 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
942 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
943 }
944
945 if (index >= 0
946 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
947 stmt, op))
948 {
949 *index_p = index;
950 *by_ref_p = true;
951 if (size_p)
952 *size_p = size;
953 return true;
954 }
955 return false;
956 }
957
958 /* Just like the previous function, just without the param_analysis_info
959 pointer, for users outside of this file. */
960
961 bool
962 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
963 tree op, int *index_p, HOST_WIDE_INT *offset_p,
964 bool *by_ref_p)
965 {
966 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
967 offset_p, NULL, by_ref_p);
968 }
969
970 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
971 of an assignment statement STMT, try to determine whether we are actually
972 handling any of the following cases and construct an appropriate jump
973 function into JFUNC if so:
974
975 1) The passed value is loaded from a formal parameter which is not a gimple
976 register (most probably because it is addressable, the value has to be
977 scalar) and we can guarantee the value has not changed. This case can
978 therefore be described by a simple pass-through jump function. For example:
979
980 foo (int a)
981 {
982 int a.0;
983
984 a.0_2 = a;
985 bar (a.0_2);
986
987 2) The passed value can be described by a simple arithmetic pass-through
988 jump function. E.g.
989
990 foo (int a)
991 {
992 int D.2064;
993
994 D.2064_4 = a.1(D) + 4;
995 bar (D.2064_4);
996
997 This case can also occur in combination of the previous one, e.g.:
998
999 foo (int a, int z)
1000 {
1001 int a.0;
1002 int D.2064;
1003
1004 a.0_3 = a;
1005 D.2064_4 = a.0_3 + 4;
1006 foo (D.2064_4);
1007
1008 3) The passed value is an address of an object within another one (which
1009 also passed by reference). Such situations are described by an ancestor
1010 jump function and describe situations such as:
1011
1012 B::foo() (struct B * const this)
1013 {
1014 struct A * D.1845;
1015
1016 D.1845_2 = &this_1(D)->D.1748;
1017 A::bar (D.1845_2);
1018
1019 INFO is the structure describing individual parameters, used at different
1020 stages of IPA optimizations. PARMS_AINFO contains the information that is
1021 only needed for intraprocedural analysis. */
1022
1023 static void
1024 compute_complex_assign_jump_func (struct ipa_node_params *info,
1025 struct param_analysis_info *parms_ainfo,
1026 struct ipa_jump_func *jfunc,
1027 gimple call, gimple stmt, tree name,
1028 tree param_type)
1029 {
1030 HOST_WIDE_INT offset, size, max_size;
1031 tree op1, tc_ssa, base, ssa;
1032 int index;
1033
1034 op1 = gimple_assign_rhs1 (stmt);
1035
1036 if (TREE_CODE (op1) == SSA_NAME)
1037 {
1038 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1039 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1040 else
1041 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
1042 SSA_NAME_DEF_STMT (op1));
1043 tc_ssa = op1;
1044 }
1045 else
1046 {
1047 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
1048 tc_ssa = gimple_assign_lhs (stmt);
1049 }
1050
1051 if (index >= 0)
1052 {
1053 tree op2 = gimple_assign_rhs2 (stmt);
1054
1055 if (op2)
1056 {
1057 if (!is_gimple_ip_invariant (op2)
1058 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1059 && !useless_type_conversion_p (TREE_TYPE (name),
1060 TREE_TYPE (op1))))
1061 return;
1062
1063 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1064 gimple_assign_rhs_code (stmt));
1065 }
1066 else if (gimple_assign_single_p (stmt))
1067 {
1068 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1069 call, tc_ssa);
1070 bool type_p = false;
1071
1072 if (param_type && POINTER_TYPE_P (param_type))
1073 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1074 call, jfunc);
1075 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1076 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
1077 }
1078 return;
1079 }
1080
1081 if (TREE_CODE (op1) != ADDR_EXPR)
1082 return;
1083 op1 = TREE_OPERAND (op1, 0);
1084 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1085 return;
1086 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1087 if (TREE_CODE (base) != MEM_REF
1088 /* If this is a varying address, punt. */
1089 || max_size == -1
1090 || max_size != size)
1091 return;
1092 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
1093 ssa = TREE_OPERAND (base, 0);
1094 if (TREE_CODE (ssa) != SSA_NAME
1095 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1096 || offset < 0)
1097 return;
1098
1099 /* Dynamic types are changed in constructors and destructors. */
1100 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1101 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1102 {
1103 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1104 call, jfunc, offset);
1105 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1106 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1107 parm_ref_data_pass_through_p (&parms_ainfo[index],
1108 call, ssa), type_p);
1109 }
1110 }
1111
1112 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1113 it looks like:
1114
1115 iftmp.1_3 = &obj_2(D)->D.1762;
1116
1117 The base of the MEM_REF must be a default definition SSA NAME of a
1118 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1119 whole MEM_REF expression is returned and the offset calculated from any
1120 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1121 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1122
1123 static tree
1124 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1125 {
1126 HOST_WIDE_INT size, max_size;
1127 tree expr, parm, obj;
1128
1129 if (!gimple_assign_single_p (assign))
1130 return NULL_TREE;
1131 expr = gimple_assign_rhs1 (assign);
1132
1133 if (TREE_CODE (expr) != ADDR_EXPR)
1134 return NULL_TREE;
1135 expr = TREE_OPERAND (expr, 0);
1136 obj = expr;
1137 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1138
1139 if (TREE_CODE (expr) != MEM_REF
1140 /* If this is a varying address, punt. */
1141 || max_size == -1
1142 || max_size != size
1143 || *offset < 0)
1144 return NULL_TREE;
1145 parm = TREE_OPERAND (expr, 0);
1146 if (TREE_CODE (parm) != SSA_NAME
1147 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1148 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1149 return NULL_TREE;
1150
1151 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1152 *obj_p = obj;
1153 return expr;
1154 }
1155
1156
1157 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1158 statement PHI, try to find out whether NAME is in fact a
1159 multiple-inheritance typecast from a descendant into an ancestor of a formal
1160 parameter and thus can be described by an ancestor jump function and if so,
1161 write the appropriate function into JFUNC.
1162
1163 Essentially we want to match the following pattern:
1164
1165 if (obj_2(D) != 0B)
1166 goto <bb 3>;
1167 else
1168 goto <bb 4>;
1169
1170 <bb 3>:
1171 iftmp.1_3 = &obj_2(D)->D.1762;
1172
1173 <bb 4>:
1174 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1175 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1176 return D.1879_6; */
1177
1178 static void
1179 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1180 struct param_analysis_info *parms_ainfo,
1181 struct ipa_jump_func *jfunc,
1182 gimple call, gimple phi, tree param_type)
1183 {
1184 HOST_WIDE_INT offset;
1185 gimple assign, cond;
1186 basic_block phi_bb, assign_bb, cond_bb;
1187 tree tmp, parm, expr, obj;
1188 int index, i;
1189
1190 if (gimple_phi_num_args (phi) != 2)
1191 return;
1192
1193 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1194 tmp = PHI_ARG_DEF (phi, 0);
1195 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1196 tmp = PHI_ARG_DEF (phi, 1);
1197 else
1198 return;
1199 if (TREE_CODE (tmp) != SSA_NAME
1200 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1201 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1202 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1203 return;
1204
1205 assign = SSA_NAME_DEF_STMT (tmp);
1206 assign_bb = gimple_bb (assign);
1207 if (!single_pred_p (assign_bb))
1208 return;
1209 expr = get_ancestor_addr_info (assign, &obj, &offset);
1210 if (!expr)
1211 return;
1212 parm = TREE_OPERAND (expr, 0);
1213 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1214 gcc_assert (index >= 0);
1215
1216 cond_bb = single_pred (assign_bb);
1217 cond = last_stmt (cond_bb);
1218 if (!cond
1219 || gimple_code (cond) != GIMPLE_COND
1220 || gimple_cond_code (cond) != NE_EXPR
1221 || gimple_cond_lhs (cond) != parm
1222 || !integer_zerop (gimple_cond_rhs (cond)))
1223 return;
1224
1225 phi_bb = gimple_bb (phi);
1226 for (i = 0; i < 2; i++)
1227 {
1228 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1229 if (pred != assign_bb && pred != cond_bb)
1230 return;
1231 }
1232
1233 bool type_p = false;
1234 if (param_type && POINTER_TYPE_P (param_type))
1235 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1236 call, jfunc, offset);
1237 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1238 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1239 parm_ref_data_pass_through_p (&parms_ainfo[index],
1240 call, parm), type_p);
1241 }
1242
1243 /* Given OP which is passed as an actual argument to a called function,
1244 determine if it is possible to construct a KNOWN_TYPE jump function for it
1245 and if so, create one and store it to JFUNC.
1246 EXPECTED_TYPE represents a type the argument should be in */
1247
1248 static void
1249 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1250 gimple call, tree expected_type)
1251 {
1252 HOST_WIDE_INT offset, size, max_size;
1253 tree base;
1254
1255 if (!flag_devirtualize
1256 || TREE_CODE (op) != ADDR_EXPR
1257 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1258 /* Be sure expected_type is polymorphic. */
1259 || !expected_type
1260 || TREE_CODE (expected_type) != RECORD_TYPE
1261 || !TYPE_BINFO (expected_type)
1262 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
1263 return;
1264
1265 op = TREE_OPERAND (op, 0);
1266 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1267 if (!DECL_P (base)
1268 || max_size == -1
1269 || max_size != size
1270 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1271 || is_global_var (base))
1272 return;
1273
1274 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
1275 return;
1276
1277 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1278 expected_type);
1279 }
1280
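/* For instance, with a call such as

     void
     caller (void)
     {
       struct B b;        // automatic, not global
       use (&b);
     }

   where use expects a pointer to a polymorphic type, the argument gives rise
   to a KNOWN_TYPE jump function with base type B, offset 0 and component type
   EXPECTED_TYPE, unless a vtable store between the initialization of b and
   the call indicates a possible type change (a sketch).  */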
1281 /* Inspect the given TYPE and return true iff it has the same structure (the
1282 same number of fields of the same types) as a C++ member pointer. If
1283 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1284 corresponding fields there. */
1285
1286 static bool
1287 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1288 {
1289 tree fld;
1290
1291 if (TREE_CODE (type) != RECORD_TYPE)
1292 return false;
1293
1294 fld = TYPE_FIELDS (type);
1295 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1296 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1297 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1298 return false;
1299
1300 if (method_ptr)
1301 *method_ptr = fld;
1302
1303 fld = DECL_CHAIN (fld);
1304 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1305 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1306 return false;
1307 if (delta)
1308 *delta = fld;
1309
1310 if (DECL_CHAIN (fld))
1311 return false;
1312
1313 return true;
1314 }
1315
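/* The layout checked above corresponds to the Itanium C++ ABI representation
   of pointers to member functions, roughly (an illustration with invented
   names, not a definition used by this file):

     struct member_fn_ptr
     {
       void (*__pfn) ();    // method address, or (vtable offset | 1)
       ptrdiff_t __delta;   // adjustment applied to the this pointer
     };

   i.e. a field whose type is a pointer to a METHOD_TYPE followed by an
   integral delta field.  */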
1316 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1317 return the rhs of its defining statement. Otherwise return RHS as it
1318 is. */
1319
1320 static inline tree
1321 get_ssa_def_if_simple_copy (tree rhs)
1322 {
1323 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1324 {
1325 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1326
1327 if (gimple_assign_single_p (def_stmt))
1328 rhs = gimple_assign_rhs1 (def_stmt);
1329 else
1330 break;
1331 }
1332 return rhs;
1333 }
1334
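/* E.g. given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) returns a_1; the walk stops at the first
   statement that is not a simple single assignment, or at a default
   definition.  */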
1335 /* Simple linked list, describing known contents of an aggregate before a
1336 call. */
1337
1338 struct ipa_known_agg_contents_list
1339 {
1340 /* Offset and size of the described part of the aggregate. */
1341 HOST_WIDE_INT offset, size;
1342 /* Known constant value or NULL if the contents is known to be unknown. */
1343 tree constant;
1344 /* Pointer to the next structure in the list. */
1345 struct ipa_known_agg_contents_list *next;
1346 };
1347
1348 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1349 in ARG is filled in with constant values. ARG can either be an aggregate
1350 expression or a pointer to an aggregate. ARG_TYPE is the type of the aggregate.
1351 JFUNC is the jump function into which the constants are subsequently stored. */
1352
1353 static void
1354 determine_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1355 struct ipa_jump_func *jfunc)
1356 {
1357 struct ipa_known_agg_contents_list *list = NULL;
1358 int item_count = 0, const_count = 0;
1359 HOST_WIDE_INT arg_offset, arg_size;
1360 gimple_stmt_iterator gsi;
1361 tree arg_base;
1362 bool check_ref, by_ref;
1363 ao_ref r;
1364
1365 /* The function operates in three stages. First, we prepare check_ref, r,
1366 arg_base and arg_offset based on what is actually passed as an actual
1367 argument. */
1368
1369 if (POINTER_TYPE_P (arg_type))
1370 {
1371 by_ref = true;
1372 if (TREE_CODE (arg) == SSA_NAME)
1373 {
1374 tree type_size;
1375 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1376 return;
1377 check_ref = true;
1378 arg_base = arg;
1379 arg_offset = 0;
1380 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1381 arg_size = tree_to_uhwi (type_size);
1382 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1383 }
1384 else if (TREE_CODE (arg) == ADDR_EXPR)
1385 {
1386 HOST_WIDE_INT arg_max_size;
1387
1388 arg = TREE_OPERAND (arg, 0);
1389 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1390 &arg_max_size);
1391 if (arg_max_size == -1
1392 || arg_max_size != arg_size
1393 || arg_offset < 0)
1394 return;
1395 if (DECL_P (arg_base))
1396 {
1397 tree size;
1398 check_ref = false;
1399 size = build_int_cst (integer_type_node, arg_size);
1400 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1401 }
1402 else
1403 return;
1404 }
1405 else
1406 return;
1407 }
1408 else
1409 {
1410 HOST_WIDE_INT arg_max_size;
1411
1412 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1413
1414 by_ref = false;
1415 check_ref = false;
1416 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1417 &arg_max_size);
1418 if (arg_max_size == -1
1419 || arg_max_size != arg_size
1420 || arg_offset < 0)
1421 return;
1422
1423 ao_ref_init (&r, arg);
1424 }
1425
1426 /* Second stage walks back the BB, looks at individual statements and as long
1427 as it is confident of how the statements affect contents of the
1428 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1429 describing it. */
1430 gsi = gsi_for_stmt (call);
1431 gsi_prev (&gsi);
1432 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1433 {
1434 struct ipa_known_agg_contents_list *n, **p;
1435 gimple stmt = gsi_stmt (gsi);
1436 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1437 tree lhs, rhs, lhs_base;
1438 bool partial_overlap;
1439
1440 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1441 continue;
1442 if (!gimple_assign_single_p (stmt))
1443 break;
1444
1445 lhs = gimple_assign_lhs (stmt);
1446 rhs = gimple_assign_rhs1 (stmt);
1447 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1448 || TREE_CODE (lhs) == BIT_FIELD_REF
1449 || contains_bitfld_component_ref_p (lhs))
1450 break;
1451
1452 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1453 &lhs_max_size);
1454 if (lhs_max_size == -1
1455 || lhs_max_size != lhs_size
1456 || (lhs_offset < arg_offset
1457 && lhs_offset + lhs_size > arg_offset)
1458 || (lhs_offset < arg_offset + arg_size
1459 && lhs_offset + lhs_size > arg_offset + arg_size))
1460 break;
1461
1462 if (check_ref)
1463 {
1464 if (TREE_CODE (lhs_base) != MEM_REF
1465 || TREE_OPERAND (lhs_base, 0) != arg_base
1466 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1467 break;
1468 }
1469 else if (lhs_base != arg_base)
1470 {
1471 if (DECL_P (lhs_base))
1472 continue;
1473 else
1474 break;
1475 }
1476
1477 if (lhs_offset + lhs_size < arg_offset
1478 || lhs_offset >= (arg_offset + arg_size))
1479 continue;
1480
1481 partial_overlap = false;
1482 p = &list;
1483 while (*p && (*p)->offset < lhs_offset)
1484 {
1485 if ((*p)->offset + (*p)->size > lhs_offset)
1486 {
1487 partial_overlap = true;
1488 break;
1489 }
1490 p = &(*p)->next;
1491 }
1492 if (partial_overlap)
1493 break;
1494 if (*p && (*p)->offset < lhs_offset + lhs_size)
1495 {
1496 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1497 /* We already know this value is subsequently overwritten with
1498 something else. */
1499 continue;
1500 else
1501 /* Otherwise this is a partial overlap which we cannot
1502 represent. */
1503 break;
1504 }
1505
1506 rhs = get_ssa_def_if_simple_copy (rhs);
1507 n = XALLOCA (struct ipa_known_agg_contents_list);
1508 n->size = lhs_size;
1509 n->offset = lhs_offset;
1510 if (is_gimple_ip_invariant (rhs))
1511 {
1512 n->constant = rhs;
1513 const_count++;
1514 }
1515 else
1516 n->constant = NULL_TREE;
1517 n->next = *p;
1518 *p = n;
1519
1520 item_count++;
1521 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1522 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1523 break;
1524 }
1525
1526 /* Third stage just goes over the list and creates an appropriate vector of
1527 ipa_agg_jf_item structures out of it, of course only if there are
1528 any known constants to begin with. */
1529
1530 if (const_count)
1531 {
1532 jfunc->agg.by_ref = by_ref;
1533 vec_alloc (jfunc->agg.items, const_count);
1534 while (list)
1535 {
1536 if (list->constant)
1537 {
1538 struct ipa_agg_jf_item item;
1539 item.offset = list->offset - arg_offset;
1540 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1541 item.value = unshare_expr_without_location (list->constant);
1542 jfunc->agg.items->quick_push (item);
1543 }
1544 list = list->next;
1545 }
1546 }
1547 }
1548
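/* A typical case the function above handles (a sketch):

     struct S s;
     s.a = 1;
     s.b = 64;
     foo (&s);

   produces an aggregate jump function for the argument of foo with two items,
   the offsets of fields a and b paired with the constants 1 and 64, marked as
   passed by reference.  */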
1549 static tree
1550 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1551 {
1552 int n;
1553 tree type = (e->callee
1554 ? TREE_TYPE (e->callee->decl)
1555 : gimple_call_fntype (e->call_stmt));
1556 tree t = TYPE_ARG_TYPES (type);
1557
1558 for (n = 0; n < i; n++)
1559 {
1560 if (!t)
1561 break;
1562 t = TREE_CHAIN (t);
1563 }
1564 if (t)
1565 return TREE_VALUE (t);
1566 if (!e->callee)
1567 return NULL;
1568 t = DECL_ARGUMENTS (e->callee->decl);
1569 for (n = 0; n < i; n++)
1570 {
1571 if (!t)
1572 return NULL;
1573 t = TREE_CHAIN (t);
1574 }
1575 if (t)
1576 return TREE_TYPE (t);
1577 return NULL;
1578 }
1579
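/* For a callee declared with a prototype such as

     void f (int i, double d);

   ipa_get_callee_param_type (e, 1) yields the tree for double, taken from
   TYPE_ARG_TYPES; for unprototyped (K&R) declarations it falls back to
   DECL_ARGUMENTS and may return NULL (a sketch of the lookup order).  */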
1580 /* Compute jump function for all arguments of callsite CS and insert the
1581 information in the jump_functions array in the ipa_edge_args corresponding
1582 to this callsite. */
1583
1584 static void
1585 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1586 struct cgraph_edge *cs)
1587 {
1588 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1589 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1590 gimple call = cs->call_stmt;
1591 int n, arg_num = gimple_call_num_args (call);
1592
1593 if (arg_num == 0 || args->jump_functions)
1594 return;
1595 vec_safe_grow_cleared (args->jump_functions, arg_num);
1596
1597 if (gimple_call_internal_p (call))
1598 return;
1599 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1600 return;
1601
1602 for (n = 0; n < arg_num; n++)
1603 {
1604 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1605 tree arg = gimple_call_arg (call, n);
1606 tree param_type = ipa_get_callee_param_type (cs, n);
1607
1608 if (is_gimple_ip_invariant (arg))
1609 ipa_set_jf_constant (jfunc, arg, cs);
1610 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1611 && TREE_CODE (arg) == PARM_DECL)
1612 {
1613 int index = ipa_get_param_decl_index (info, arg);
1614
1615 gcc_assert (index >= 0);
1616 /* Aggregate passed by value, check for pass-through, otherwise we
1617 will attempt to fill in aggregate contents later in this
1618 loop. */
1619 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1620 {
1621 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1622 continue;
1623 }
1624 }
1625 else if (TREE_CODE (arg) == SSA_NAME)
1626 {
1627 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1628 {
1629 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1630 if (index >= 0)
1631 {
1632 bool agg_p, type_p;
1633 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1634 call, arg);
1635 if (param_type && POINTER_TYPE_P (param_type))
1636 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1637 call, jfunc);
1638 else
1639 type_p = false;
1640 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1641 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1642 type_p);
1643 }
1644 }
1645 else
1646 {
1647 gimple stmt = SSA_NAME_DEF_STMT (arg);
1648 if (is_gimple_assign (stmt))
1649 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1650 call, stmt, arg, param_type);
1651 else if (gimple_code (stmt) == GIMPLE_PHI)
1652 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1653 call, stmt, param_type);
1654 }
1655 }
1656 else
1657 compute_known_type_jump_func (arg, jfunc, call,
1658 param_type
1659 && POINTER_TYPE_P (param_type)
1660 ? TREE_TYPE (param_type)
1661 : NULL);
1662
1663 /* If ARG is a pointer, we cannot use its type to determine the type of the
1664 aggregate passed (because type conversions are ignored in gimple). Usually
1665 we can safely get the type from the function declaration, but for K&R
1666 prototypes or variadic functions we can try our luck with the type of the
1667 pointer passed. TODO: Since we look for actual initialization of the memory
1668 object, we might do better to work out the type from the stores we find. */
1669 if (!param_type)
1670 param_type = TREE_TYPE (arg);
1671
1672 if ((jfunc->type != IPA_JF_PASS_THROUGH
1673 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1674 && (jfunc->type != IPA_JF_ANCESTOR
1675 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1676 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1677 || POINTER_TYPE_P (param_type)))
1678 determine_known_aggregate_parts (call, arg, param_type, jfunc);
1679 }
1680 }
1681
1682 /* Compute jump functions for all edges - both direct and indirect - outgoing
1683 from NODE. Also count the actual arguments in the process. */
1684
1685 static void
1686 ipa_compute_jump_functions (struct cgraph_node *node,
1687 struct param_analysis_info *parms_ainfo)
1688 {
1689 struct cgraph_edge *cs;
1690
1691 for (cs = node->callees; cs; cs = cs->next_callee)
1692 {
1693 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1694 NULL);
1695 /* We do not need to bother analyzing calls to unknown
1696 functions unless they may become known during lto/whopr. */
1697 if (!callee->definition && !flag_lto)
1698 continue;
1699 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1700 }
1701
1702 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1703 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1704 }
1705
1706 /* If STMT looks like a statement loading a value from a member pointer formal
1707 parameter, return that parameter and store the offset of the field to
1708 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1709 might be clobbered). If USE_DELTA, then we look for a use of the delta
1710 field rather than the pfn. */
1711
1712 static tree
1713 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1714 HOST_WIDE_INT *offset_p)
1715 {
1716 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1717
1718 if (!gimple_assign_single_p (stmt))
1719 return NULL_TREE;
1720
1721 rhs = gimple_assign_rhs1 (stmt);
1722 if (TREE_CODE (rhs) == COMPONENT_REF)
1723 {
1724 ref_field = TREE_OPERAND (rhs, 1);
1725 rhs = TREE_OPERAND (rhs, 0);
1726 }
1727 else
1728 ref_field = NULL_TREE;
1729 if (TREE_CODE (rhs) != MEM_REF)
1730 return NULL_TREE;
1731 rec = TREE_OPERAND (rhs, 0);
1732 if (TREE_CODE (rec) != ADDR_EXPR)
1733 return NULL_TREE;
1734 rec = TREE_OPERAND (rec, 0);
1735 if (TREE_CODE (rec) != PARM_DECL
1736 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1737 return NULL_TREE;
1738 ref_offset = TREE_OPERAND (rhs, 1);
1739
1740 if (use_delta)
1741 fld = delta_field;
1742 else
1743 fld = ptr_field;
1744 if (offset_p)
1745 *offset_p = int_bit_position (fld);
1746
1747 if (ref_field)
1748 {
1749 if (integer_nonzerop (ref_offset))
1750 return NULL_TREE;
1751 return ref_field == fld ? rec : NULL_TREE;
1752 }
1753 else
1754 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1755 : NULL_TREE;
1756 }
1757
1758 /* Returns true iff T is an SSA_NAME defined by a statement. */
1759
1760 static bool
1761 ipa_is_ssa_with_stmt_def (tree t)
1762 {
1763 if (TREE_CODE (t) == SSA_NAME
1764 && !SSA_NAME_IS_DEFAULT_DEF (t))
1765 return true;
1766 else
1767 return false;
1768 }
1769
1770 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1771 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1772 indirect call graph edge. */
1773
1774 static struct cgraph_edge *
1775 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1776 {
1777 struct cgraph_edge *cs;
1778
1779 cs = cgraph_edge (node, stmt);
1780 cs->indirect_info->param_index = param_index;
1781 cs->indirect_info->agg_contents = 0;
1782 cs->indirect_info->member_ptr = 0;
1783 return cs;
1784 }
1785
1786 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1787 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1788 intermediate information about each formal parameter. Currently it checks
1789 whether the call calls a pointer that is a formal parameter and if so, the
1790 parameter is marked with the called flag and an indirect call graph edge
1791 describing the call is created. This is very simple for ordinary pointers
1792 represented in SSA but not-so-nice when it comes to member pointers. The
1793 ugly part of this function does nothing more than trying to match the
1794 pattern of such a call. An example of such a pattern is the gimple dump
1795 below, the call is on the last line:
1796
1797 <bb 2>:
1798 f$__delta_5 = f.__delta;
1799 f$__pfn_24 = f.__pfn;
1800
1801 or
1802 <bb 2>:
1803 f$__delta_5 = MEM[(struct *)&f];
1804 f$__pfn_24 = MEM[(struct *)&f + 4B];
1805
1806 and a few lines below:
1807
1808 <bb 5>
1809 D.2496_3 = (int) f$__pfn_24;
1810 D.2497_4 = D.2496_3 & 1;
1811 if (D.2497_4 != 0)
1812 goto <bb 3>;
1813 else
1814 goto <bb 4>;
1815
1816 <bb 6>:
1817 D.2500_7 = (unsigned int) f$__delta_5;
1818 D.2501_8 = &S + D.2500_7;
1819 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1820 D.2503_10 = *D.2502_9;
1821 D.2504_12 = f$__pfn_24 + -1;
1822 D.2505_13 = (unsigned int) D.2504_12;
1823 D.2506_14 = D.2503_10 + D.2505_13;
1824 D.2507_15 = *D.2506_14;
1825 iftmp.11_16 = (String:: *) D.2507_15;
1826
1827 <bb 7>:
1828 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1829 D.2500_19 = (unsigned int) f$__delta_5;
1830 D.2508_20 = &S + D.2500_19;
1831 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1832
1833 Such patterns are results of simple calls to a member pointer:
1834
1835 int doprinting (int (MyString::* f)(int) const)
1836 {
1837 MyString S ("somestring");
1838
1839 return (S.*f)(4);
1840 }
1841
1842 The function also looks for called pointers loaded from aggregates
1843 passed by value or reference. */
1844
1845 static void
1846 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1847 struct ipa_node_params *info,
1848 struct param_analysis_info *parms_ainfo,
1849 gimple call, tree target)
1850 {
1851 gimple def;
1852 tree n1, n2;
1853 gimple d1, d2;
1854 tree rec, rec2, cond;
1855 gimple branch;
1856 int index;
1857 basic_block bb, virt_bb, join;
1858 HOST_WIDE_INT offset;
1859 bool by_ref;
1860
1861 if (SSA_NAME_IS_DEFAULT_DEF (target))
1862 {
1863 tree var = SSA_NAME_VAR (target);
1864 index = ipa_get_param_decl_index (info, var);
1865 if (index >= 0)
1866 ipa_note_param_call (node, index, call);
1867 return;
1868 }
1869
1870 def = SSA_NAME_DEF_STMT (target);
1871 if (gimple_assign_single_p (def)
1872 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1873 gimple_assign_rhs1 (def), &index, &offset,
1874 NULL, &by_ref))
1875 {
1876 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1877 if (cs->indirect_info->offset != offset)
1878 cs->indirect_info->outer_type = NULL;
1879 cs->indirect_info->offset = offset;
1880 cs->indirect_info->agg_contents = 1;
1881 cs->indirect_info->by_ref = by_ref;
1882 return;
1883 }
1884
1885 /* Now we need to try to match the complex pattern of calling a member
1886 pointer. */
1887 if (gimple_code (def) != GIMPLE_PHI
1888 || gimple_phi_num_args (def) != 2
1889 || !POINTER_TYPE_P (TREE_TYPE (target))
1890 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1891 return;
1892
1893 /* First, we need to check whether one of these is a load from a member
1894 pointer that is a parameter to this function. */
1895 n1 = PHI_ARG_DEF (def, 0);
1896 n2 = PHI_ARG_DEF (def, 1);
1897 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1898 return;
1899 d1 = SSA_NAME_DEF_STMT (n1);
1900 d2 = SSA_NAME_DEF_STMT (n2);
1901
1902 join = gimple_bb (def);
1903 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1904 {
1905 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1906 return;
1907
1908 bb = EDGE_PRED (join, 0)->src;
1909 virt_bb = gimple_bb (d2);
1910 }
1911 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1912 {
1913 bb = EDGE_PRED (join, 1)->src;
1914 virt_bb = gimple_bb (d1);
1915 }
1916 else
1917 return;
1918
1919 /* Second, we need to check that the basic blocks are laid out in the way
1920 corresponding to the pattern. */
1921
1922 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1923 || single_pred (virt_bb) != bb
1924 || single_succ (virt_bb) != join)
1925 return;
1926
1927 /* Third, let's see that the branching is done depending on the least
1928 significant bit of the pfn. */
1929
1930 branch = last_stmt (bb);
1931 if (!branch || gimple_code (branch) != GIMPLE_COND)
1932 return;
1933
1934 if ((gimple_cond_code (branch) != NE_EXPR
1935 && gimple_cond_code (branch) != EQ_EXPR)
1936 || !integer_zerop (gimple_cond_rhs (branch)))
1937 return;
1938
1939 cond = gimple_cond_lhs (branch);
1940 if (!ipa_is_ssa_with_stmt_def (cond))
1941 return;
1942
1943 def = SSA_NAME_DEF_STMT (cond);
1944 if (!is_gimple_assign (def)
1945 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1946 || !integer_onep (gimple_assign_rhs2 (def)))
1947 return;
1948
1949 cond = gimple_assign_rhs1 (def);
1950 if (!ipa_is_ssa_with_stmt_def (cond))
1951 return;
1952
1953 def = SSA_NAME_DEF_STMT (cond);
1954
1955 if (is_gimple_assign (def)
1956 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1957 {
1958 cond = gimple_assign_rhs1 (def);
1959 if (!ipa_is_ssa_with_stmt_def (cond))
1960 return;
1961 def = SSA_NAME_DEF_STMT (cond);
1962 }
1963
1964 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1965 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1966 == ptrmemfunc_vbit_in_delta),
1967 NULL);
1968 if (rec != rec2)
1969 return;
1970
1971 index = ipa_get_param_decl_index (info, rec);
1972 if (index >= 0
1973 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1974 {
1975 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1976 if (cs->indirect_info->offset != offset)
1977 cs->indirect_info->outer_type = NULL;
1978 cs->indirect_info->offset = offset;
1979 cs->indirect_info->agg_contents = 1;
1980 cs->indirect_info->member_ptr = 1;
1981 }
1982
1983 return;
1984 }
1985
1986 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1987 object referenced in the expression is a formal parameter of the caller
1988 (described by INFO), create a call note for the statement. */
1989
1990 static void
1991 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1992 struct ipa_node_params *info, gimple call,
1993 tree target)
1994 {
1995 struct cgraph_edge *cs;
1996 struct cgraph_indirect_call_info *ii;
1997 struct ipa_jump_func jfunc;
1998 tree obj = OBJ_TYPE_REF_OBJECT (target);
1999 int index;
2000 HOST_WIDE_INT anc_offset;
2001
2002 if (!flag_devirtualize)
2003 return;
2004
2005 if (TREE_CODE (obj) != SSA_NAME)
2006 return;
2007
2008 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2009 {
2010 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2011 return;
2012
2013 anc_offset = 0;
2014 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2015 gcc_assert (index >= 0);
2016 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2017 call, &jfunc))
2018 return;
2019 }
2020 else
2021 {
2022 gimple stmt = SSA_NAME_DEF_STMT (obj);
2023 tree expr;
2024
2025 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2026 if (!expr)
2027 return;
2028 index = ipa_get_param_decl_index (info,
2029 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2030 gcc_assert (index >= 0);
2031 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2032 call, &jfunc, anc_offset))
2033 return;
2034 }
2035
2036 cs = ipa_note_param_call (node, index, call);
2037 ii = cs->indirect_info;
2038 ii->offset = anc_offset;
2039 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2040 ii->otr_type = obj_type_ref_class (target);
2041 ii->polymorphic = 1;
2042 }
2043
2044 /* Analyze call statement CALL to determine whether and how it utilizes
2045 formal parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2046 containing intermediate information about each formal parameter. */
2047
2048 static void
2049 ipa_analyze_call_uses (struct cgraph_node *node,
2050 struct ipa_node_params *info,
2051 struct param_analysis_info *parms_ainfo, gimple call)
2052 {
2053 tree target = gimple_call_fn (call);
2054 struct cgraph_edge *cs;
2055
2056 if (!target
2057 || (TREE_CODE (target) != SSA_NAME
2058 && !virtual_method_call_p (target)))
2059 return;
2060
2061 /* If we previously turned the call into a direct call, there is
2062 no need to analyze. */
2063 cs = cgraph_edge (node, call);
2064 if (cs && !cs->indirect_unknown_callee)
2065 return;
2066 if (TREE_CODE (target) == SSA_NAME)
2067 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2068 else if (virtual_method_call_p (target))
2069 ipa_analyze_virtual_call_uses (node, info, call, target);
2070 }
2071
2072
2073 /* Analyze the call statement STMT with respect to formal parameters (described
2074 in INFO) of caller given by NODE. Currently it only checks whether formal
2075 parameters are called. PARMS_AINFO is a pointer to a vector containing
2076 intermediate information about each formal parameter. */
2077
2078 static void
2079 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2080 struct param_analysis_info *parms_ainfo, gimple stmt)
2081 {
2082 if (is_gimple_call (stmt))
2083 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2084 }
2085
2086 /* Callback of walk_stmt_load_store_addr_ops for loads, stores and address
2087 operands alike. If OP is a parameter declaration, mark it as used in the info structure
2088 passed in DATA. */
2089
2090 static bool
2091 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2092 {
2093 struct ipa_node_params *info = (struct ipa_node_params *) data;
2094
2095 op = get_base_address (op);
2096 if (op
2097 && TREE_CODE (op) == PARM_DECL)
2098 {
2099 int index = ipa_get_param_decl_index (info, op);
2100 gcc_assert (index >= 0);
2101 ipa_set_param_used (info, index, true);
2102 }
2103
2104 return false;
2105 }
2106
2107 /* Scan the function body of NODE and inspect the uses of formal parameters.
2108 Store the findings in various structures of the associated ipa_node_params
2109 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2110 vector containing intermediate information about each formal parameter. */
2111
2112 static void
2113 ipa_analyze_params_uses (struct cgraph_node *node,
2114 struct param_analysis_info *parms_ainfo)
2115 {
2116 tree decl = node->decl;
2117 basic_block bb;
2118 struct function *func;
2119 gimple_stmt_iterator gsi;
2120 struct ipa_node_params *info = IPA_NODE_REF (node);
2121 int i;
2122
2123 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2124 return;
2125
2126 info->uses_analysis_done = 1;
2127 if (ipa_func_spec_opts_forbid_analysis_p (node))
2128 {
2129 for (i = 0; i < ipa_get_param_count (info); i++)
2130 {
2131 ipa_set_param_used (info, i, true);
2132 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2133 }
2134 return;
2135 }
2136
2137 for (i = 0; i < ipa_get_param_count (info); i++)
2138 {
2139 tree parm = ipa_get_param (info, i);
2140 int controlled_uses = 0;
2141
2142 /* For SSA regs see if parameter is used. For non-SSA we compute
2143 the flag during modification analysis. */
2144 if (is_gimple_reg (parm))
2145 {
2146 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2147 parm);
2148 if (ddef && !has_zero_uses (ddef))
2149 {
2150 imm_use_iterator imm_iter;
2151 use_operand_p use_p;
2152
2153 ipa_set_param_used (info, i, true);
2154 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2155 if (!is_gimple_call (USE_STMT (use_p)))
2156 {
2157 if (!is_gimple_debug (USE_STMT (use_p)))
2158 {
2159 controlled_uses = IPA_UNDESCRIBED_USE;
2160 break;
2161 }
2162 }
2163 else
2164 controlled_uses++;
2165 }
2166 else
2167 controlled_uses = 0;
2168 }
2169 else
2170 controlled_uses = IPA_UNDESCRIBED_USE;
2171 ipa_set_controlled_uses (info, i, controlled_uses);
2172 }
2173
2174 func = DECL_STRUCT_FUNCTION (decl);
2175 FOR_EACH_BB_FN (bb, func)
2176 {
2177 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2178 {
2179 gimple stmt = gsi_stmt (gsi);
2180
2181 if (is_gimple_debug (stmt))
2182 continue;
2183
2184 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2185 walk_stmt_load_store_addr_ops (stmt, info,
2186 visit_ref_for_mod_analysis,
2187 visit_ref_for_mod_analysis,
2188 visit_ref_for_mod_analysis);
2189 }
2190 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2191 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2192 visit_ref_for_mod_analysis,
2193 visit_ref_for_mod_analysis,
2194 visit_ref_for_mod_analysis);
2195 }
2196 }
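/* Worked example (illustrative only; WRAP is a made-up function):

     static void wrap (void (*cb) (void)) { cb (); cb (); }

   The default definition of CB has exactly two immediate uses, both of them
   call statements, so the loop above records CONTROLLED_USES == 2.  A single
   non-call, non-debug use (say, storing CB into a global) would instead
   force IPA_UNDESCRIBED_USE.  */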
2197
2198 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2199
2200 static void
2201 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2202 {
2203 int i;
2204
2205 for (i = 0; i < param_count; i++)
2206 {
2207 if (parms_ainfo[i].parm_visited_statements)
2208 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2209 if (parms_ainfo[i].pt_visited_statements)
2210 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2211 }
2212 }
2213
2214 /* Initialize the array describing properties of formal parameters
2215 of NODE, analyze their uses and compute jump functions associated
2216 with actual arguments of calls from within NODE. */
2217
2218 void
2219 ipa_analyze_node (struct cgraph_node *node)
2220 {
2221 struct ipa_node_params *info;
2222 struct param_analysis_info *parms_ainfo;
2223 int param_count;
2224
2225 ipa_check_create_node_params ();
2226 ipa_check_create_edge_args ();
2227 info = IPA_NODE_REF (node);
2228 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2229 ipa_initialize_node_params (node);
2230
2231 param_count = ipa_get_param_count (info);
2232 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2233 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2234
2235 ipa_analyze_params_uses (node, parms_ainfo);
2236 ipa_compute_jump_functions (node, parms_ainfo);
2237
2238 free_parms_ainfo (parms_ainfo, param_count);
2239 pop_cfun ();
2240 }
2241
2242 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2243 attempt a type-based devirtualization. If successful, return the
2244 target function declaration, otherwise return NULL. */
2245
2246 tree
2247 ipa_intraprocedural_devirtualization (gimple call)
2248 {
2249 tree binfo, token, fndecl;
2250 struct ipa_jump_func jfunc;
2251 tree otr = gimple_call_fn (call);
2252
2253 jfunc.type = IPA_JF_UNKNOWN;
2254 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2255 call, obj_type_ref_class (otr));
2256 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2257 return NULL_TREE;
2258 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2259 if (!binfo)
2260 return NULL_TREE;
2261 token = OBJ_TYPE_REF_TOKEN (otr);
2262 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2263 binfo);
2264 #ifdef ENABLE_CHECKING
2265 if (fndecl)
2266 gcc_assert (possible_polymorphic_call_target_p
2267 (otr, cgraph_get_node (fndecl)));
2268 #endif
2269 return fndecl;
2270 }
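/* Simplified sketch of what the function above can resolve (illustrative,
   assuming no earlier folding has already devirtualized the call):

     struct A { virtual int f (); };
     int g () { A a; return a.f (); }

   The OBJ_TYPE_REF call is made on an object whose dynamic type is known to
   be A, so a known-type jump function is computed, the BINFO of A is found
   and the vtable lookup yields A::f as the direct target.  */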
2271
2272 /* Update the jump function DST when the call graph edge corresponding to SRC
2273 is being inlined, knowing that DST is of type ancestor and SRC of known
2274 type. */
2275
2276 static void
2277 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2278 struct ipa_jump_func *dst)
2279 {
2280 HOST_WIDE_INT combined_offset;
2281 tree combined_type;
2282
2283 if (!ipa_get_jf_ancestor_type_preserved (dst))
2284 {
2285 dst->type = IPA_JF_UNKNOWN;
2286 return;
2287 }
2288
2289 combined_offset = ipa_get_jf_known_type_offset (src)
2290 + ipa_get_jf_ancestor_offset (dst);
2291 combined_type = ipa_get_jf_ancestor_type (dst);
2292
2293 ipa_set_jf_known_type (dst, combined_offset,
2294 ipa_get_jf_known_type_base_type (src),
2295 combined_type);
2296 }
2297
2298 /* Update the jump functions associated with call graph edge E when the call
2299 graph edge CS is being inlined, assuming that E->caller is already (possibly
2300 indirectly) inlined into CS->callee and that E has not been inlined. */
2301
2302 static void
2303 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2304 struct cgraph_edge *e)
2305 {
2306 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2307 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2308 int count = ipa_get_cs_argument_count (args);
2309 int i;
2310
2311 for (i = 0; i < count; i++)
2312 {
2313 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2314
2315 if (dst->type == IPA_JF_ANCESTOR)
2316 {
2317 struct ipa_jump_func *src;
2318 int dst_fid = dst->value.ancestor.formal_id;
2319
2320 /* Variable number of arguments can cause havoc if we try to access
2321 one that does not exist in the inlined edge. So make sure we
2322 don't. */
2323 if (dst_fid >= ipa_get_cs_argument_count (top))
2324 {
2325 dst->type = IPA_JF_UNKNOWN;
2326 continue;
2327 }
2328
2329 src = ipa_get_ith_jump_func (top, dst_fid);
2330
2331 if (src->agg.items
2332 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2333 {
2334 struct ipa_agg_jf_item *item;
2335 int j;
2336
2337 /* Currently we do not produce clobber aggregate jump functions;
2338 replace this with merging when we do. */
2339 gcc_assert (!dst->agg.items);
2340
2341 dst->agg.items = vec_safe_copy (src->agg.items);
2342 dst->agg.by_ref = src->agg.by_ref;
2343 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2344 item->offset -= dst->value.ancestor.offset;
2345 }
2346
2347 if (src->type == IPA_JF_KNOWN_TYPE)
2348 combine_known_type_and_ancestor_jfs (src, dst);
2349 else if (src->type == IPA_JF_PASS_THROUGH
2350 && src->value.pass_through.operation == NOP_EXPR)
2351 {
2352 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2353 dst->value.ancestor.agg_preserved &=
2354 src->value.pass_through.agg_preserved;
2355 dst->value.ancestor.type_preserved &=
2356 src->value.pass_through.type_preserved;
2357 }
2358 else if (src->type == IPA_JF_ANCESTOR)
2359 {
2360 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2361 dst->value.ancestor.offset += src->value.ancestor.offset;
2362 dst->value.ancestor.agg_preserved &=
2363 src->value.ancestor.agg_preserved;
2364 dst->value.ancestor.type_preserved &=
2365 src->value.ancestor.type_preserved;
2366 }
2367 else
2368 dst->type = IPA_JF_UNKNOWN;
2369 }
2370 else if (dst->type == IPA_JF_PASS_THROUGH)
2371 {
2372 struct ipa_jump_func *src;
2373 /* We must check range due to calls with variable number of arguments
2374 and we cannot combine jump functions with operations. */
2375 if (dst->value.pass_through.operation == NOP_EXPR
2376 && (dst->value.pass_through.formal_id
2377 < ipa_get_cs_argument_count (top)))
2378 {
2379 int dst_fid = dst->value.pass_through.formal_id;
2380 src = ipa_get_ith_jump_func (top, dst_fid);
2381 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2382
2383 switch (src->type)
2384 {
2385 case IPA_JF_UNKNOWN:
2386 dst->type = IPA_JF_UNKNOWN;
2387 break;
2388 case IPA_JF_KNOWN_TYPE:
2389 if (ipa_get_jf_pass_through_type_preserved (dst))
2390 ipa_set_jf_known_type (dst,
2391 ipa_get_jf_known_type_offset (src),
2392 ipa_get_jf_known_type_base_type (src),
2393 ipa_get_jf_known_type_base_type (src));
2394 else
2395 dst->type = IPA_JF_UNKNOWN;
2396 break;
2397 case IPA_JF_CONST:
2398 ipa_set_jf_cst_copy (dst, src);
2399 break;
2400
2401 case IPA_JF_PASS_THROUGH:
2402 {
2403 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2404 enum tree_code operation;
2405 operation = ipa_get_jf_pass_through_operation (src);
2406
2407 if (operation == NOP_EXPR)
2408 {
2409 bool agg_p, type_p;
2410 agg_p = dst_agg_p
2411 && ipa_get_jf_pass_through_agg_preserved (src);
2412 type_p = ipa_get_jf_pass_through_type_preserved (src)
2413 && ipa_get_jf_pass_through_type_preserved (dst);
2414 ipa_set_jf_simple_pass_through (dst, formal_id,
2415 agg_p, type_p);
2416 }
2417 else
2418 {
2419 tree operand = ipa_get_jf_pass_through_operand (src);
2420 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2421 operation);
2422 }
2423 break;
2424 }
2425 case IPA_JF_ANCESTOR:
2426 {
2427 bool agg_p, type_p;
2428 agg_p = dst_agg_p
2429 && ipa_get_jf_ancestor_agg_preserved (src);
2430 type_p = ipa_get_jf_ancestor_type_preserved (src)
2431 && ipa_get_jf_pass_through_type_preserved (dst);
2432 ipa_set_ancestor_jf (dst,
2433 ipa_get_jf_ancestor_offset (src),
2434 ipa_get_jf_ancestor_type (src),
2435 ipa_get_jf_ancestor_formal_id (src),
2436 agg_p, type_p);
2437 break;
2438 }
2439 default:
2440 gcc_unreachable ();
2441 }
2442
2443 if (src->agg.items
2444 && (dst_agg_p || !src->agg.by_ref))
2445 {
2446 /* Currently we do not produce clobber aggregate jump
2447 functions; replace this with merging when we do. */
2448 gcc_assert (!dst->agg.items);
2449
2450 dst->agg.by_ref = src->agg.by_ref;
2451 dst->agg.items = vec_safe_copy (src->agg.items);
2452 }
2453 }
2454 else
2455 dst->type = IPA_JF_UNKNOWN;
2456 }
2457 }
2458 }
2459
2460 /* If TARGET is an addr_expr of a function declaration, make it the destination
2461 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2462
2463 struct cgraph_edge *
2464 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2465 {
2466 struct cgraph_node *callee;
2467 struct inline_edge_summary *es = inline_edge_summary (ie);
2468 bool unreachable = false;
2469
2470 if (TREE_CODE (target) == ADDR_EXPR)
2471 target = TREE_OPERAND (target, 0);
2472 if (TREE_CODE (target) != FUNCTION_DECL)
2473 {
2474 target = canonicalize_constructor_val (target, NULL);
2475 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2476 {
2477 if (ie->indirect_info->member_ptr)
2478 /* Member pointer call that goes through a VMT lookup. */
2479 return NULL;
2480
2481 if (dump_file)
2482 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2483 " in %s/%i, making it unreachable.\n",
2484 ie->caller->name (), ie->caller->order);
2485 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2486 callee = cgraph_get_create_node (target);
2487 unreachable = true;
2488 }
2489 else
2490 callee = cgraph_get_node (target);
2491 }
2492 else
2493 callee = cgraph_get_node (target);
2494
2495 /* Because may-edges are not explicitly represented and the vtable may be
2496 external, we may create the first reference to the object in the unit. */
2497 if (!callee || callee->global.inlined_to)
2498 {
2499
2500 /* We had better ensure we can refer to it.
2501 In the case of static functions we are out of luck, since we have already
2502 removed their bodies. In the case of public functions we may or may
2503 not introduce the reference. */
2504 if (!canonicalize_constructor_val (target, NULL)
2505 || !TREE_PUBLIC (target))
2506 {
2507 if (dump_file)
2508 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2509 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2510 xstrdup (ie->caller->name ()),
2511 ie->caller->order,
2512 xstrdup (ie->callee->name ()),
2513 ie->callee->order);
2514 return NULL;
2515 }
2516 callee = cgraph_get_create_node (target);
2517 }
2518 ipa_check_create_node_params ();
2519
2520 /* We cannot make edges to inline clones. It is a bug that someone removed
2521 the cgraph node too early. */
2522 gcc_assert (!callee->global.inlined_to);
2523
2524 if (dump_file && !unreachable)
2525 {
2526 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2527 "(%s/%i -> %s/%i), for stmt ",
2528 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2529 xstrdup (ie->caller->name ()),
2530 ie->caller->order,
2531 xstrdup (callee->name ()),
2532 callee->order);
2533 if (ie->call_stmt)
2534 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2535 else
2536 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2537 }
2538 ie = cgraph_make_edge_direct (ie, callee);
2539 es = inline_edge_summary (ie);
2540 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2541 - eni_size_weights.call_cost);
2542 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2543 - eni_time_weights.call_cost);
2544
2545 return ie;
2546 }
2547
2548 /* Retrieve the value from aggregate jump function AGG for the given OFFSET,
2549 or return NULL if there is none. BY_REF specifies whether the value has to
2550 be passed by reference or by value. */
2551
2552 tree
2553 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2554 HOST_WIDE_INT offset, bool by_ref)
2555 {
2556 struct ipa_agg_jf_item *item;
2557 int i;
2558
2559 if (by_ref != agg->by_ref)
2560 return NULL;
2561
2562 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2563 if (item->offset == offset)
2564 {
2565 /* Currently we do not have clobber values; return NULL for them once
2566 we do. */
2567 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2568 return item->value;
2569 }
2570 return NULL;
2571 }
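/* Usage sketch (illustrative; JFUNC is an assumed jump function describing
   an aggregate passed by reference with a known value at bit offset 64):

     tree t = ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);

   T is then the recorded constant, or NULL when no item matches the offset
   or when the by-reference flags disagree.  */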
2572
2573 /* Remove a reference to SYMBOL from the list of references of a node given by
2574 reference description RDESC. Return true if the reference has been
2575 successfully found and removed. */
2576
2577 static bool
2578 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2579 {
2580 struct ipa_ref *to_del;
2581 struct cgraph_edge *origin;
2582
2583 origin = rdesc->cs;
2584 if (!origin)
2585 return false;
2586 to_del = ipa_find_reference (origin->caller, symbol,
2587 origin->call_stmt, origin->lto_stmt_uid);
2588 if (!to_del)
2589 return false;
2590
2591 ipa_remove_reference (to_del);
2592 if (dump_file)
2593 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2594 xstrdup (origin->caller->name ()),
2595 origin->caller->order, xstrdup (symbol->name ()));
2596 return true;
2597 }
2598
2599 /* If JFUNC has a reference description with refcount different from
2600 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2601 NULL. JFUNC must be a constant jump function. */
2602
2603 static struct ipa_cst_ref_desc *
2604 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2605 {
2606 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2607 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2608 return rdesc;
2609 else
2610 return NULL;
2611 }
2612
2613 /* If the value of constant jump function JFUNC is an address of a function
2614 declaration, return the associated call graph node. Otherwise return
2615 NULL. */
2616
2617 static cgraph_node *
2618 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2619 {
2620 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2621 tree cst = ipa_get_jf_constant (jfunc);
2622 if (TREE_CODE (cst) != ADDR_EXPR
2623 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2624 return NULL;
2625
2626 return cgraph_get_node (TREE_OPERAND (cst, 0));
2627 }
2628
2629
2630 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2631 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2632 the edge specified in the rdesc. Return false if either the symbol or the
2633 reference could not be found, otherwise return true. */
2634
2635 static bool
2636 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2637 {
2638 struct ipa_cst_ref_desc *rdesc;
2639 if (jfunc->type == IPA_JF_CONST
2640 && (rdesc = jfunc_rdesc_usable (jfunc))
2641 && --rdesc->refcount == 0)
2642 {
2643 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2644 if (!symbol)
2645 return false;
2646
2647 return remove_described_reference (symbol, rdesc);
2648 }
2649 return true;
2650 }
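/* Illustrative scenario: if the constant &foo has a single described use at
   this call site and the indirect call through it has just been made direct,
   the refcount drops from 1 to 0 and the reference from the caller to foo is
   removed, because the address no longer escapes through the call
   arguments.  */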
2651
2652 /* Try to find a destination for indirect edge IE that corresponds to a simple
2653 call or a call of a member function pointer and where the destination is a
2654 pointer formal parameter described by jump function JFUNC. If it can be
2655 determined, return the newly direct edge, otherwise return NULL.
2656 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2657
2658 static struct cgraph_edge *
2659 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2660 struct ipa_jump_func *jfunc,
2661 struct ipa_node_params *new_root_info)
2662 {
2663 struct cgraph_edge *cs;
2664 tree target;
2665 bool agg_contents = ie->indirect_info->agg_contents;
2666
2667 if (ie->indirect_info->agg_contents)
2668 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2669 ie->indirect_info->offset,
2670 ie->indirect_info->by_ref);
2671 else
2672 target = ipa_value_from_jfunc (new_root_info, jfunc);
2673 if (!target)
2674 return NULL;
2675 cs = ipa_make_edge_direct_to_target (ie, target);
2676
2677 if (cs && !agg_contents)
2678 {
2679 bool ok;
2680 gcc_checking_assert (cs->callee
2681 && (cs != ie
2682 || jfunc->type != IPA_JF_CONST
2683 || !cgraph_node_for_jfunc (jfunc)
2684 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2685 ok = try_decrement_rdesc_refcount (jfunc);
2686 gcc_checking_assert (ok);
2687 }
2688
2689 return cs;
2690 }
2691
2692 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2693 call based on a formal parameter which is described by jump function JFUNC
2694 and if it can be determined, make it direct and return the direct edge.
2695 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2696 are relative to. */
2697
2698 static struct cgraph_edge *
2699 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2700 struct ipa_jump_func *jfunc,
2701 struct ipa_node_params *new_root_info)
2702 {
2703 tree binfo, target;
2704
2705 if (!flag_devirtualize)
2706 return NULL;
2707
2708 /* First try to do lookup via known virtual table pointer value. */
2709 if (!ie->indirect_info->by_ref)
2710 {
2711 tree vtable;
2712 unsigned HOST_WIDE_INT offset;
2713 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2714 ie->indirect_info->offset,
2715 true);
2716 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2717 {
2718 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2719 vtable, offset);
2720 if (target)
2721 {
2722 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2723 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2724 || !possible_polymorphic_call_target_p
2725 (ie, cgraph_get_node (target)))
2726 {
2727 if (dump_file)
2728 fprintf (dump_file,
2729 "Type inconsident devirtualization: %s/%i->%s\n",
2730 ie->caller->name (), ie->caller->order,
2731 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2732 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2733 cgraph_get_create_node (target);
2734 }
2735 return ipa_make_edge_direct_to_target (ie, target);
2736 }
2737 }
2738 }
2739
2740 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2741
2742 if (!binfo)
2743 return NULL;
2744
2745 if (TREE_CODE (binfo) != TREE_BINFO)
2746 {
2747 ipa_polymorphic_call_context context;
2748 vec <cgraph_node *>targets;
2749 bool final;
2750
2751 if (!get_polymorphic_call_info_from_invariant
2752 (&context, binfo, ie->indirect_info->otr_type,
2753 ie->indirect_info->offset))
2754 return NULL;
2755 targets = possible_polymorphic_call_targets
2756 (ie->indirect_info->otr_type,
2757 ie->indirect_info->otr_token,
2758 context, &final);
2759 if (!final || targets.length () > 1)
2760 return NULL;
2761 if (targets.length () == 1)
2762 target = targets[0]->decl;
2763 else
2764 {
2765 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2766 cgraph_get_create_node (target);
2767 }
2768 }
2769 else
2770 {
2771 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2772 ie->indirect_info->otr_type);
2773 if (binfo)
2774 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2775 binfo);
2776 else
2777 return NULL;
2778 }
2779
2780 if (target)
2781 {
2782 #ifdef ENABLE_CHECKING
2783 gcc_assert (possible_polymorphic_call_target_p
2784 (ie, cgraph_get_node (target)));
2785 #endif
2786 return ipa_make_edge_direct_to_target (ie, target);
2787 }
2788 else
2789 return NULL;
2790 }
2791
2792 /* Update the param called notes associated with NODE when CS is being inlined,
2793 assuming NODE is (potentially indirectly) inlined into CS->callee.
2794 Moreover, if the callee is discovered to be constant, create a new cgraph
2795 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2796 unless NEW_EDGES is NULL. Return true iff new edge(s) were created. */
2797
2798 static bool
2799 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2800 struct cgraph_node *node,
2801 vec<cgraph_edge_p> *new_edges)
2802 {
2803 struct ipa_edge_args *top;
2804 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2805 struct ipa_node_params *new_root_info;
2806 bool res = false;
2807
2808 ipa_check_create_edge_args ();
2809 top = IPA_EDGE_REF (cs);
2810 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2811 ? cs->caller->global.inlined_to
2812 : cs->caller);
2813
2814 for (ie = node->indirect_calls; ie; ie = next_ie)
2815 {
2816 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2817 struct ipa_jump_func *jfunc;
2818 int param_index;
2819
2820 next_ie = ie->next_callee;
2821
2822 if (ici->param_index == -1)
2823 continue;
2824
2825 /* We must check range due to calls with variable number of arguments: */
2826 if (ici->param_index >= ipa_get_cs_argument_count (top))
2827 {
2828 ici->param_index = -1;
2829 continue;
2830 }
2831
2832 param_index = ici->param_index;
2833 jfunc = ipa_get_ith_jump_func (top, param_index);
2834
2835 if (!flag_indirect_inlining)
2836 new_direct_edge = NULL;
2837 else if (ici->polymorphic)
2838 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2839 new_root_info);
2840 else
2841 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2842 new_root_info);
2843 /* If speculation was removed, then we need to do nothing. */
2844 if (new_direct_edge && new_direct_edge != ie)
2845 {
2846 new_direct_edge->indirect_inlining_edge = 1;
2847 top = IPA_EDGE_REF (cs);
2848 res = true;
2849 }
2850 else if (new_direct_edge)
2851 {
2852 new_direct_edge->indirect_inlining_edge = 1;
2853 if (new_direct_edge->call_stmt)
2854 new_direct_edge->call_stmt_cannot_inline_p
2855 = !gimple_check_call_matching_types (
2856 new_direct_edge->call_stmt,
2857 new_direct_edge->callee->decl, false);
2858 if (new_edges)
2859 {
2860 new_edges->safe_push (new_direct_edge);
2861 res = true;
2862 }
2863 top = IPA_EDGE_REF (cs);
2864 }
2865 else if (jfunc->type == IPA_JF_PASS_THROUGH
2866 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2867 {
2868 if (ici->agg_contents
2869 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2870 ici->param_index = -1;
2871 else
2872 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2873 }
2874 else if (jfunc->type == IPA_JF_ANCESTOR)
2875 {
2876 if (ici->agg_contents
2877 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2878 ici->param_index = -1;
2879 else
2880 {
2881 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2882 if (ipa_get_jf_ancestor_offset (jfunc))
2883 ici->outer_type = NULL;
2884 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2885 }
2886 }
2887 else
2888 /* Either we can find a destination for this edge now or never. */
2889 ici->param_index = -1;
2890 }
2891
2892 return res;
2893 }
2894
2895 /* Recursively traverse subtree of NODE (including node) made of inlined
2896 cgraph_edges when CS has been inlined and invoke
2897 update_indirect_edges_after_inlining on all nodes and
2898 update_jump_functions_after_inlining on all non-inlined edges that lead out
2899 of this subtree. Newly discovered indirect edges will be added to
2900 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
2901 created. */
2902
2903 static bool
2904 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2905 struct cgraph_node *node,
2906 vec<cgraph_edge_p> *new_edges)
2907 {
2908 struct cgraph_edge *e;
2909 bool res;
2910
2911 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2912
2913 for (e = node->callees; e; e = e->next_callee)
2914 if (!e->inline_failed)
2915 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2916 else
2917 update_jump_functions_after_inlining (cs, e);
2918 for (e = node->indirect_calls; e; e = e->next_callee)
2919 update_jump_functions_after_inlining (cs, e);
2920
2921 return res;
2922 }
2923
2924 /* Combine two controlled uses counts as done during inlining. */
2925
2926 static int
2927 combine_controlled_uses_counters (int c, int d)
2928 {
2929 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2930 return IPA_UNDESCRIBED_USE;
2931 else
2932 return c + d - 1;
2933 }
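/* Worked arithmetic (illustrative): with C == 2 controlled uses in the
   caller and D == 3 in the callee, the combined count is 2 + 3 - 1 == 4;
   the subtracted one is the use that was the argument of the now inlined
   call itself.  Either operand being IPA_UNDESCRIBED_USE poisons the result
   to IPA_UNDESCRIBED_USE.  */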
2934
2935 /* Propagate number of controlled users from CS->caleee to the new root of the
2936 tree of inlined nodes. */
2937
2938 static void
2939 propagate_controlled_uses (struct cgraph_edge *cs)
2940 {
2941 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2942 struct cgraph_node *new_root = cs->caller->global.inlined_to
2943 ? cs->caller->global.inlined_to : cs->caller;
2944 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2945 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2946 int count, i;
2947
2948 count = MIN (ipa_get_cs_argument_count (args),
2949 ipa_get_param_count (old_root_info));
2950 for (i = 0; i < count; i++)
2951 {
2952 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2953 struct ipa_cst_ref_desc *rdesc;
2954
2955 if (jf->type == IPA_JF_PASS_THROUGH)
2956 {
2957 int src_idx, c, d;
2958 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2959 c = ipa_get_controlled_uses (new_root_info, src_idx);
2960 d = ipa_get_controlled_uses (old_root_info, i);
2961
2962 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2963 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2964 c = combine_controlled_uses_counters (c, d);
2965 ipa_set_controlled_uses (new_root_info, src_idx, c);
2966 if (c == 0 && new_root_info->ipcp_orig_node)
2967 {
2968 struct cgraph_node *n;
2969 struct ipa_ref *ref;
2970 tree t = new_root_info->known_vals[src_idx];
2971
2972 if (t && TREE_CODE (t) == ADDR_EXPR
2973 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2974 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2975 && (ref = ipa_find_reference (new_root,
2976 n, NULL, 0)))
2977 {
2978 if (dump_file)
2979 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2980 "reference from %s/%i to %s/%i.\n",
2981 xstrdup (new_root->name ()),
2982 new_root->order,
2983 xstrdup (n->name ()), n->order);
2984 ipa_remove_reference (ref);
2985 }
2986 }
2987 }
2988 else if (jf->type == IPA_JF_CONST
2989 && (rdesc = jfunc_rdesc_usable (jf)))
2990 {
2991 int d = ipa_get_controlled_uses (old_root_info, i);
2992 int c = rdesc->refcount;
2993 rdesc->refcount = combine_controlled_uses_counters (c, d);
2994 if (rdesc->refcount == 0)
2995 {
2996 tree cst = ipa_get_jf_constant (jf);
2997 struct cgraph_node *n;
2998 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2999 && TREE_CODE (TREE_OPERAND (cst, 0))
3000 == FUNCTION_DECL);
3001 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3002 if (n)
3003 {
3004 struct cgraph_node *clone;
3005 bool ok;
3006 ok = remove_described_reference (n, rdesc);
3007 gcc_checking_assert (ok);
3008
3009 clone = cs->caller;
3010 while (clone->global.inlined_to
3011 && clone != rdesc->cs->caller
3012 && IPA_NODE_REF (clone)->ipcp_orig_node)
3013 {
3014 struct ipa_ref *ref;
3015 ref = ipa_find_reference (clone,
3016 n, NULL, 0);
3017 if (ref)
3018 {
3019 if (dump_file)
3020 fprintf (dump_file, "ipa-prop: Removing "
3021 "cloning-created reference "
3022 "from %s/%i to %s/%i.\n",
3023 xstrdup (clone->name ()),
3024 clone->order,
3025 xstrdup (n->name ()),
3026 n->order);
3027 ipa_remove_reference (ref);
3028 }
3029 clone = clone->callers->caller;
3030 }
3031 }
3032 }
3033 }
3034 }
3035
3036 for (i = ipa_get_param_count (old_root_info);
3037 i < ipa_get_cs_argument_count (args);
3038 i++)
3039 {
3040 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3041
3042 if (jf->type == IPA_JF_CONST)
3043 {
3044 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3045 if (rdesc)
3046 rdesc->refcount = IPA_UNDESCRIBED_USE;
3047 }
3048 else if (jf->type == IPA_JF_PASS_THROUGH)
3049 ipa_set_controlled_uses (new_root_info,
3050 jf->value.pass_through.formal_id,
3051 IPA_UNDESCRIBED_USE);
3052 }
3053 }
3054
3055 /* Update jump functions and call note functions on inlining the call site CS.
3056 CS is expected to lead to a node already cloned by
3057 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3058 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3059 created. */
3060
3061 bool
3062 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3063 vec<cgraph_edge_p> *new_edges)
3064 {
3065 bool changed;
3066 /* Do nothing if the preparation phase has not been carried out yet
3067 (i.e. during early inlining). */
3068 if (!ipa_node_params_vector.exists ())
3069 return false;
3070 gcc_assert (ipa_edge_args_vector);
3071
3072 propagate_controlled_uses (cs);
3073 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3074
3075 return changed;
3076 }
3077
3078 /* Frees all dynamically allocated structures that the argument info points
3079 to. */
3080
3081 void
3082 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3083 {
3084 vec_free (args->jump_functions);
3085 memset (args, 0, sizeof (*args));
3086 }
3087
3088 /* Free all ipa_edge structures. */
3089
3090 void
3091 ipa_free_all_edge_args (void)
3092 {
3093 int i;
3094 struct ipa_edge_args *args;
3095
3096 if (!ipa_edge_args_vector)
3097 return;
3098
3099 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3100 ipa_free_edge_args_substructures (args);
3101
3102 vec_free (ipa_edge_args_vector);
3103 }
3104
3105 /* Frees all dynamically allocated structures that the param info points
3106 to. */
3107
3108 void
3109 ipa_free_node_params_substructures (struct ipa_node_params *info)
3110 {
3111 info->descriptors.release ();
3112 free (info->lattices);
3113 /* Lattice values and their sources are deallocated with their allocation
3114 pool. */
3115 info->known_vals.release ();
3116 memset (info, 0, sizeof (*info));
3117 }
3118
3119 /* Free all ipa_node_params structures. */
3120
3121 void
3122 ipa_free_all_node_params (void)
3123 {
3124 int i;
3125 struct ipa_node_params *info;
3126
3127 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3128 ipa_free_node_params_substructures (info);
3129
3130 ipa_node_params_vector.release ();
3131 }
3132
3133 /* Set the aggregate replacements of NODE to be AGGVALS. */
3134
3135 void
3136 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3137 struct ipa_agg_replacement_value *aggvals)
3138 {
3139 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3140 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3141
3142 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3143 }
3144
3145 /* Hook that is called by cgraph.c when an edge is removed. */
3146
3147 static void
3148 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3149 {
3150 struct ipa_edge_args *args;
3151
3152 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3153 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3154 return;
3155
3156 args = IPA_EDGE_REF (cs);
3157 if (args->jump_functions)
3158 {
3159 struct ipa_jump_func *jf;
3160 int i;
3161 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3162 {
3163 struct ipa_cst_ref_desc *rdesc;
3164 try_decrement_rdesc_refcount (jf);
3165 if (jf->type == IPA_JF_CONST
3166 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3167 && rdesc->cs == cs)
3168 rdesc->cs = NULL;
3169 }
3170 }
3171
3172 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3173 }
3174
3175 /* Hook that is called by cgraph.c when a node is removed. */
3176
3177 static void
3178 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3179 {
3180 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3181 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3182 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3183 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3184 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3185 }
3186
3187 /* Hook that is called by cgraph.c when an edge is duplicated. */
3188
3189 static void
3190 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3191 __attribute__((unused)) void *data)
3192 {
3193 struct ipa_edge_args *old_args, *new_args;
3194 unsigned int i;
3195
3196 ipa_check_create_edge_args ();
3197
3198 old_args = IPA_EDGE_REF (src);
3199 new_args = IPA_EDGE_REF (dst);
3200
3201 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3202
3203 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3204 {
3205 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3206 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3207
3208 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3209
3210 if (src_jf->type == IPA_JF_CONST)
3211 {
3212 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3213
3214 if (!src_rdesc)
3215 dst_jf->value.constant.rdesc = NULL;
3216 else if (src->caller == dst->caller)
3217 {
3218 struct ipa_ref *ref;
3219 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3220 gcc_checking_assert (n);
3221 ref = ipa_find_reference (src->caller, n,
3222 src->call_stmt, src->lto_stmt_uid);
3223 gcc_checking_assert (ref);
3224 ipa_clone_ref (ref, dst->caller, ref->stmt);
3225
3226 gcc_checking_assert (ipa_refdesc_pool);
3227 struct ipa_cst_ref_desc *dst_rdesc
3228 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3229 dst_rdesc->cs = dst;
3230 dst_rdesc->refcount = src_rdesc->refcount;
3231 dst_rdesc->next_duplicate = NULL;
3232 dst_jf->value.constant.rdesc = dst_rdesc;
3233 }
3234 else if (src_rdesc->cs == src)
3235 {
3236 struct ipa_cst_ref_desc *dst_rdesc;
3237 gcc_checking_assert (ipa_refdesc_pool);
3238 dst_rdesc
3239 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3240 dst_rdesc->cs = dst;
3241 dst_rdesc->refcount = src_rdesc->refcount;
3242 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3243 src_rdesc->next_duplicate = dst_rdesc;
3244 dst_jf->value.constant.rdesc = dst_rdesc;
3245 }
3246 else
3247 {
3248 struct ipa_cst_ref_desc *dst_rdesc;
3249 /* This can happen during inlining, when a JFUNC can refer to a
3250 reference taken in a function up in the tree of inline clones.
3251 We need to find the duplicate that refers to our tree of
3252 inline clones. */
3253
3254 gcc_assert (dst->caller->global.inlined_to);
3255 for (dst_rdesc = src_rdesc->next_duplicate;
3256 dst_rdesc;
3257 dst_rdesc = dst_rdesc->next_duplicate)
3258 {
3259 struct cgraph_node *top;
3260 top = dst_rdesc->cs->caller->global.inlined_to
3261 ? dst_rdesc->cs->caller->global.inlined_to
3262 : dst_rdesc->cs->caller;
3263 if (dst->caller->global.inlined_to == top)
3264 break;
3265 }
3266 gcc_assert (dst_rdesc);
3267 dst_jf->value.constant.rdesc = dst_rdesc;
3268 }
3269 }
3270 }
3271 }
3272
3273 /* Hook that is called by cgraph.c when a node is duplicated. */
3274
3275 static void
3276 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3277 ATTRIBUTE_UNUSED void *data)
3278 {
3279 struct ipa_node_params *old_info, *new_info;
3280 struct ipa_agg_replacement_value *old_av, *new_av;
3281
3282 ipa_check_create_node_params ();
3283 old_info = IPA_NODE_REF (src);
3284 new_info = IPA_NODE_REF (dst);
3285
3286 new_info->descriptors = old_info->descriptors.copy ();
3287 new_info->lattices = NULL;
3288 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3289
3290 new_info->uses_analysis_done = old_info->uses_analysis_done;
3291 new_info->node_enqueued = old_info->node_enqueued;
3292
3293 old_av = ipa_get_agg_replacements_for_node (src);
3294 if (!old_av)
3295 return;
3296
3297 new_av = NULL;
3298 while (old_av)
3299 {
3300 struct ipa_agg_replacement_value *v;
3301
3302 v = ggc_alloc_ipa_agg_replacement_value ();
3303 memcpy (v, old_av, sizeof (*v));
3304 v->next = new_av;
3305 new_av = v;
3306 old_av = old_av->next;
3307 }
3308 ipa_set_node_agg_value_chain (dst, new_av);
3309 }
3310
3311
3312 /* Analyze a function newly added to the callgraph. */
3313
3314 static void
3315 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3316 {
3317 if (cgraph_function_with_gimple_body_p (node))
3318 ipa_analyze_node (node);
3319 }
3320
3321 /* Register our cgraph hooks if they are not already there. */
3322
3323 void
3324 ipa_register_cgraph_hooks (void)
3325 {
3326 if (!edge_removal_hook_holder)
3327 edge_removal_hook_holder =
3328 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3329 if (!node_removal_hook_holder)
3330 node_removal_hook_holder =
3331 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3332 if (!edge_duplication_hook_holder)
3333 edge_duplication_hook_holder =
3334 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3335 if (!node_duplication_hook_holder)
3336 node_duplication_hook_holder =
3337 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3338 function_insertion_hook_holder =
3339 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3340 }
3341
3342 /* Unregister our cgraph hooks. */
3343
3344 static void
3345 ipa_unregister_cgraph_hooks (void)
3346 {
3347 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3348 edge_removal_hook_holder = NULL;
3349 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3350 node_removal_hook_holder = NULL;
3351 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3352 edge_duplication_hook_holder = NULL;
3353 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3354 node_duplication_hook_holder = NULL;
3355 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3356 function_insertion_hook_holder = NULL;
3357 }
3358
3359 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3360 longer needed after ipa-cp. */
3361
3362 void
3363 ipa_free_all_structures_after_ipa_cp (void)
3364 {
3365 if (!optimize)
3366 {
3367 ipa_free_all_edge_args ();
3368 ipa_free_all_node_params ();
3369 free_alloc_pool (ipcp_sources_pool);
3370 free_alloc_pool (ipcp_values_pool);
3371 free_alloc_pool (ipcp_agg_lattice_pool);
3372 ipa_unregister_cgraph_hooks ();
3373 if (ipa_refdesc_pool)
3374 free_alloc_pool (ipa_refdesc_pool);
3375 }
3376 }
3377
3378 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3379 longer needed after indirect inlining. */
3380
3381 void
3382 ipa_free_all_structures_after_iinln (void)
3383 {
3384 ipa_free_all_edge_args ();
3385 ipa_free_all_node_params ();
3386 ipa_unregister_cgraph_hooks ();
3387 if (ipcp_sources_pool)
3388 free_alloc_pool (ipcp_sources_pool);
3389 if (ipcp_values_pool)
3390 free_alloc_pool (ipcp_values_pool);
3391 if (ipcp_agg_lattice_pool)
3392 free_alloc_pool (ipcp_agg_lattice_pool);
3393 if (ipa_refdesc_pool)
3394 free_alloc_pool (ipa_refdesc_pool);
3395 }
3396
3397 /* Print the ipa_tree_map data structure of function NODE to F. */
3399
3400 void
3401 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3402 {
3403 int i, count;
3404 struct ipa_node_params *info;
3405
3406 if (!node->definition)
3407 return;
3408 info = IPA_NODE_REF (node);
3409 fprintf (f, " function %s/%i parameter descriptors:\n",
3410 node->name (), node->order);
3411 count = ipa_get_param_count (info);
3412 for (i = 0; i < count; i++)
3413 {
3414 int c;
3415
3416 fprintf (f, " ");
3417 ipa_dump_param (f, info, i);
3418 if (ipa_is_param_used (info, i))
3419 fprintf (f, " used");
3420 c = ipa_get_controlled_uses (info, i);
3421 if (c == IPA_UNDESCRIBED_USE)
3422 fprintf (f, " undescribed_use");
3423 else
3424 fprintf (f, " controlled_uses=%i", c);
3425 fprintf (f, "\n");
3426 }
3427 }
3428
3429 /* Print ipa_tree_map data structures of all functions in the
3430 callgraph to F. */
3431
3432 void
3433 ipa_print_all_params (FILE * f)
3434 {
3435 struct cgraph_node *node;
3436
3437 fprintf (f, "\nFunction parameters:\n");
3438 FOR_EACH_FUNCTION (node)
3439 ipa_print_node_params (f, node);
3440 }
3441
3442 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3443
3444 vec<tree>
3445 ipa_get_vector_of_formal_parms (tree fndecl)
3446 {
3447 vec<tree> args;
3448 int count;
3449 tree parm;
3450
3451 gcc_assert (!flag_wpa);
3452 count = count_formal_params (fndecl);
3453 args.create (count);
3454 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3455 args.quick_push (parm);
3456
3457 return args;
3458 }
3459
3460 /* Return a heap allocated vector containing types of formal parameters of
3461 function type FNTYPE. */
3462
3463 vec<tree>
3464 ipa_get_vector_of_formal_parm_types (tree fntype)
3465 {
3466 vec<tree> types;
3467 int count = 0;
3468 tree t;
3469
3470 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3471 count++;
3472
3473 types.create (count);
3474 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3475 types.quick_push (TREE_VALUE (t));
3476
3477 return types;
3478 }
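/* Example (illustrative): for a prototyped type such as int (int, char *)
   the TYPE_ARG_TYPES chain is int -> char * -> void, so the returned vector
   has three entries, the trailing void_type_node marking the end of the
   prototype.  For an unprototyped (K&R) function TYPE_ARG_TYPES is NULL_TREE
   and the vector comes back empty.  */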
3479
3480 /* Modify the function declaration FNDECL and its type according to the plan in
3481 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3482 to reflect the actual parameters being modified, which are determined by the
3483 base_index field. */
3484
3485 void
3486 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3487 {
3488 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3489 tree orig_type = TREE_TYPE (fndecl);
3490 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3491
3492 /* The following test is an ugly hack; some functions simply don't have any
3493 arguments in their type. This is probably a bug but well... */
3494 bool care_for_types = (old_arg_types != NULL_TREE);
3495 bool last_parm_void;
3496 vec<tree> otypes;
3497 if (care_for_types)
3498 {
3499 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3500 == void_type_node);
3501 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3502 if (last_parm_void)
3503 gcc_assert (oparms.length () + 1 == otypes.length ());
3504 else
3505 gcc_assert (oparms.length () == otypes.length ());
3506 }
3507 else
3508 {
3509 last_parm_void = false;
3510 otypes.create (0);
3511 }
3512
3513 int len = adjustments.length ();
3514 tree *link = &DECL_ARGUMENTS (fndecl);
3515 tree new_arg_types = NULL;
3516 for (int i = 0; i < len; i++)
3517 {
3518 struct ipa_parm_adjustment *adj;
3519 gcc_assert (link);
3520
3521 adj = &adjustments[i];
3522 tree parm;
3523 if (adj->op == IPA_PARM_OP_NEW)
3524 parm = NULL;
3525 else
3526 parm = oparms[adj->base_index];
3527 adj->base = parm;
3528
3529 if (adj->op == IPA_PARM_OP_COPY)
3530 {
3531 if (care_for_types)
3532 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3533 new_arg_types);
3534 *link = parm;
3535 link = &DECL_CHAIN (parm);
3536 }
3537 else if (adj->op != IPA_PARM_OP_REMOVE)
3538 {
3539 tree new_parm;
3540 tree ptype;
3541
3542 if (adj->by_ref)
3543 ptype = build_pointer_type (adj->type);
3544 else
3545 {
3546 ptype = adj->type;
3547 if (is_gimple_reg_type (ptype))
3548 {
3549 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3550 if (TYPE_ALIGN (ptype) < malign)
3551 ptype = build_aligned_type (ptype, malign);
3552 }
3553 }
3554
3555 if (care_for_types)
3556 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3557
3558 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3559 ptype);
3560 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3561 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3562 DECL_ARTIFICIAL (new_parm) = 1;
3563 DECL_ARG_TYPE (new_parm) = ptype;
3564 DECL_CONTEXT (new_parm) = fndecl;
3565 TREE_USED (new_parm) = 1;
3566 DECL_IGNORED_P (new_parm) = 1;
3567 layout_decl (new_parm, 0);
3568
3569 if (adj->op == IPA_PARM_OP_NEW)
3570 adj->base = NULL;
3571 else
3572 adj->base = parm;
3573 adj->new_decl = new_parm;
3574
3575 *link = new_parm;
3576 link = &DECL_CHAIN (new_parm);
3577 }
3578 }
3579
3580 *link = NULL_TREE;
3581
3582 tree new_reversed = NULL;
3583 if (care_for_types)
3584 {
3585 new_reversed = nreverse (new_arg_types);
3586 if (last_parm_void)
3587 {
3588 if (new_reversed)
3589 TREE_CHAIN (new_arg_types) = void_list_node;
3590 else
3591 new_reversed = void_list_node;
3592 }
3593 }
3594
3595 /* Use copy_node to preserve as much as possible from the original type
3596 (debug info, attribute lists etc.).
3597 The exception is METHOD_TYPEs, which must have a THIS argument.
3598 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3599 instead. */
3600 tree new_type = NULL;
3601 if (TREE_CODE (orig_type) != METHOD_TYPE
3602 || (adjustments[0].op == IPA_PARM_OP_COPY
3603 && adjustments[0].base_index == 0))
3604 {
3605 new_type = build_distinct_type_copy (orig_type);
3606 TYPE_ARG_TYPES (new_type) = new_reversed;
3607 }
3608 else
3609 {
3610 new_type
3611 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3612 new_reversed));
3613 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3614 DECL_VINDEX (fndecl) = NULL_TREE;
3615 }
3616
3617 /* When the signature changes, we need to clear the builtin info. */
3618 if (DECL_BUILT_IN (fndecl))
3619 {
3620 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3621 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3622 }
3623
3624 /* This is a new type, not a copy of an old type, so we need to reassociate
3625 variants. We can handle everything except the main variant lazily. */
3626 tree t = TYPE_MAIN_VARIANT (orig_type);
3627 if (orig_type != t)
3628 {
3629 TYPE_MAIN_VARIANT (new_type) = t;
3630 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3631 TYPE_NEXT_VARIANT (t) = new_type;
3632 }
3633 else
3634 {
3635 TYPE_MAIN_VARIANT (new_type) = new_type;
3636 TYPE_NEXT_VARIANT (new_type) = NULL;
3637 }
3638
3639 TREE_TYPE (fndecl) = new_type;
3640 DECL_VIRTUAL_P (fndecl) = 0;
3641 otypes.release ();
3642 oparms.release ();
3643 }
3644
3645 /* Modify the actual arguments of the function call statement STMT as
3646 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must be
3647 NULL. Otherwise it must contain the corresponding call graph edge. */
3648
3649 void
3650 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3651 ipa_parm_adjustment_vec adjustments)
3652 {
3653 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3654 vec<tree> vargs;
3655 vec<tree, va_gc> **debug_args = NULL;
3656 gimple new_stmt;
3657 gimple_stmt_iterator gsi, prev_gsi;
3658 tree callee_decl;
3659 int i, len;
3660
3661 len = adjustments.length ();
3662 vargs.create (len);
3663 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3664 ipa_remove_stmt_references (current_node, stmt);
3665
3666 gsi = gsi_for_stmt (stmt);
3667 prev_gsi = gsi;
3668 gsi_prev (&prev_gsi);
3669 for (i = 0; i < len; i++)
3670 {
3671 struct ipa_parm_adjustment *adj;
3672
3673 adj = &adjustments[i];
3674
3675 if (adj->op == IPA_PARM_OP_COPY)
3676 {
3677 tree arg = gimple_call_arg (stmt, adj->base_index);
3678
3679 vargs.quick_push (arg);
3680 }
3681 else if (adj->op != IPA_PARM_OP_REMOVE)
3682 {
3683 tree expr, base, off;
3684 location_t loc;
3685 unsigned int deref_align = 0;
3686 bool deref_base = false;
3687
3688 /* We create a new parameter out of the value of the old one; we can
3689 do the following kinds of transformations:
3690
3691 - A scalar passed by reference is converted to a scalar passed by
3692 value. (adj->by_ref is false and the type of the original
3693 actual argument is a pointer to a scalar).
3694
3695 - A part of an aggregate is passed instead of the whole aggregate.
3696 The part can be passed either by value or by reference, this is
3697 determined by value of adj->by_ref. Moreover, the code below
3698 handles both situations when the original aggregate is passed by
3699 value (its type is not a pointer) and when it is passed by
3700 reference (it is a pointer to an aggregate).
3701
3702 When the new argument is passed by reference (adj->by_ref is true)
3703 it must be a part of an aggregate and therefore we form it by
3704 simply taking the address of a reference inside the original
3705 aggregate. */
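
/* For illustration (hypothetical callee and field names): if the callee
used to take a struct S *p and now takes only the int field at byte
offset 4 of *p, a call foo (p_1) is rewritten below to pass
MEM[(int *)p_1 + 4] by building a MEM_REF at the recorded offset; had
the new parameter been by_ref instead, the address of that MEM_REF
would be passed. */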
3706
3707 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3708 base = gimple_call_arg (stmt, adj->base_index);
3709 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3710 : EXPR_LOCATION (base);
3711
3712 if (TREE_CODE (base) != ADDR_EXPR
3713 && POINTER_TYPE_P (TREE_TYPE (base)))
3714 off = build_int_cst (adj->alias_ptr_type,
3715 adj->offset / BITS_PER_UNIT);
3716 else
3717 {
3718 HOST_WIDE_INT base_offset;
3719 tree prev_base;
3720 bool addrof;
3721
3722 if (TREE_CODE (base) == ADDR_EXPR)
3723 {
3724 base = TREE_OPERAND (base, 0);
3725 addrof = true;
3726 }
3727 else
3728 addrof = false;
3729 prev_base = base;
3730 base = get_addr_base_and_unit_offset (base, &base_offset);
3731 /* Aggregate arguments can have non-invariant addresses. */
3732 if (!base)
3733 {
3734 base = build_fold_addr_expr (prev_base);
3735 off = build_int_cst (adj->alias_ptr_type,
3736 adj->offset / BITS_PER_UNIT);
3737 }
3738 else if (TREE_CODE (base) == MEM_REF)
3739 {
3740 if (!addrof)
3741 {
3742 deref_base = true;
3743 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3744 }
3745 off = build_int_cst (adj->alias_ptr_type,
3746 base_offset
3747 + adj->offset / BITS_PER_UNIT);
3748 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3749 off);
3750 base = TREE_OPERAND (base, 0);
3751 }
3752 else
3753 {
3754 off = build_int_cst (adj->alias_ptr_type,
3755 base_offset
3756 + adj->offset / BITS_PER_UNIT);
3757 base = build_fold_addr_expr (base);
3758 }
3759 }
3760
3761 if (!adj->by_ref)
3762 {
3763 tree type = adj->type;
3764 unsigned int align;
3765 unsigned HOST_WIDE_INT misalign;
3766
3767 if (deref_base)
3768 {
3769 align = deref_align;
3770 misalign = 0;
3771 }
3772 else
3773 {
3774 get_pointer_alignment_1 (base, &align, &misalign);
3775 if (TYPE_ALIGN (type) > align)
3776 align = TYPE_ALIGN (type);
3777 }
3778 misalign += (tree_to_double_int (off)
3779 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3780 * BITS_PER_UNIT);
3781 misalign = misalign & (align - 1);
3782 if (misalign != 0)
3783 align = (misalign & -misalign);
3784 if (align < TYPE_ALIGN (type))
3785 type = build_aligned_type (type, align);
3786 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3787 }
3788 else
3789 {
3790 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3791 expr = build_fold_addr_expr (expr);
3792 }
3793
3794 expr = force_gimple_operand_gsi (&gsi, expr,
3795 adj->by_ref
3796 || is_gimple_reg_type (adj->type),
3797 NULL, true, GSI_SAME_STMT);
3798 vargs.quick_push (expr);
3799 }
3800 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3801 {
3802 unsigned int ix;
3803 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3804 gimple def_temp;
3805
3806 arg = gimple_call_arg (stmt, adj->base_index);
3807 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3808 {
3809 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3810 continue;
3811 arg = fold_convert_loc (gimple_location (stmt),
3812 TREE_TYPE (origin), arg);
3813 }
3814 if (debug_args == NULL)
3815 debug_args = decl_debug_args_insert (callee_decl);
3816 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3817 if (ddecl == origin)
3818 {
3819 ddecl = (**debug_args)[ix + 1];
3820 break;
3821 }
3822 if (ddecl == NULL)
3823 {
3824 ddecl = make_node (DEBUG_EXPR_DECL);
3825 DECL_ARTIFICIAL (ddecl) = 1;
3826 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3827 DECL_MODE (ddecl) = DECL_MODE (origin);
3828
3829 vec_safe_push (*debug_args, origin);
3830 vec_safe_push (*debug_args, ddecl);
3831 }
3832 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3833 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3834 }
3835 }
3836
3837 if (dump_file && (dump_flags & TDF_DETAILS))
3838 {
3839 fprintf (dump_file, "replacing stmt:");
3840 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3841 }
3842
3843 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3844 vargs.release ();
3845 if (gimple_call_lhs (stmt))
3846 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3847
3848 gimple_set_block (new_stmt, gimple_block (stmt));
3849 if (gimple_has_location (stmt))
3850 gimple_set_location (new_stmt, gimple_location (stmt));
3851 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3852 gimple_call_copy_flags (new_stmt, stmt);
3853
3854 if (dump_file && (dump_flags & TDF_DETAILS))
3855 {
3856 fprintf (dump_file, "with stmt:");
3857 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3858 fprintf (dump_file, "\n");
3859 }
3860 gsi_replace (&gsi, new_stmt, true);
3861 if (cs)
3862 cgraph_set_call_stmt (cs, new_stmt);
3863 do
3864 {
3865 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3866 gsi_prev (&gsi);
3867 }
3868 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3869 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3870
3871 update_ssa (TODO_update_ssa);
3872 free_dominance_info (CDI_DOMINATORS);
3873 }
3874
3875 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3876 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3877 specifies whether the function should care about type incompatibility between the
3878 current and new expressions. If it is false, the function will leave
3879 incompatibility issues to the caller. Return true iff the expression
3880 was modified. */
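
/* Illustrative example (hypothetical names): if the matching adjustment
maps a use of the old parameter, say p->f, to the new scalar parameter
SYNTH.3, then *EXPR becomes SYNTH.3 (or MEM[SYNTH.3] for a by_ref
candidate), wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the
types disagree. */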
3881
3882 bool
3883 ipa_modify_expr (tree *expr, bool convert,
3884 ipa_parm_adjustment_vec adjustments)
3885 {
3886 struct ipa_parm_adjustment *cand
3887 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3888 if (!cand)
3889 return false;
3890
3891 tree src;
3892 if (cand->by_ref)
3893 src = build_simple_mem_ref (cand->new_decl);
3894 else
3895 src = cand->new_decl;
3896
3897 if (dump_file && (dump_flags & TDF_DETAILS))
3898 {
3899 fprintf (dump_file, "About to replace expr ");
3900 print_generic_expr (dump_file, *expr, 0);
3901 fprintf (dump_file, " with ");
3902 print_generic_expr (dump_file, src, 0);
3903 fprintf (dump_file, "\n");
3904 }
3905
3906 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3907 {
3908 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3909 *expr = vce;
3910 }
3911 else
3912 *expr = src;
3913 return true;
3914 }
3915
3916 /* If T is an SSA_NAME, return NULL if it is not a default def or
3917 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3918 the base variable is always returned, regardless of whether it is a
3919 default def. Return T if it is not an SSA_NAME. */
3920
3921 static tree
3922 get_ssa_base_param (tree t, bool ignore_default_def)
3923 {
3924 if (TREE_CODE (t) == SSA_NAME)
3925 {
3926 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3927 return SSA_NAME_VAR (t);
3928 else
3929 return NULL_TREE;
3930 }
3931 return t;
3932 }
3933
3934 /* Given an expression, return an adjustment entry specifying the
3935 transformation to be done on EXPR. If no suitable adjustment entry
3936 is found, return NULL.
3937
3938 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3939 default def, otherwise bail on them.
3940
3941 If CONVERT is non-NULL, this function will set *CONVERT if the
3942 expression provided is a component reference. ADJUSTMENTS is the
3943 adjustments vector. */
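
/* Example walk (hypothetical expression): for REALPART_EXPR <p_1(D)->c>,
*EXPR is first advanced to the inner operand and *CONVERT is set; the
base and bit offset of p->c are then computed, the MEM_REF is peeled
back to the SSA name p_1(D) and thus to its PARM_DECL, and that base
and offset are matched against the entries of ADJUSTMENTS. */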
3944
3945 ipa_parm_adjustment *
3946 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3947 ipa_parm_adjustment_vec adjustments,
3948 bool ignore_default_def)
3949 {
3950 if (TREE_CODE (**expr) == BIT_FIELD_REF
3951 || TREE_CODE (**expr) == IMAGPART_EXPR
3952 || TREE_CODE (**expr) == REALPART_EXPR)
3953 {
3954 *expr = &TREE_OPERAND (**expr, 0);
3955 if (convert)
3956 *convert = true;
3957 }
3958
3959 HOST_WIDE_INT offset, size, max_size;
3960 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3961 if (!base || size == -1 || max_size == -1)
3962 return NULL;
3963
3964 if (TREE_CODE (base) == MEM_REF)
3965 {
3966 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3967 base = TREE_OPERAND (base, 0);
3968 }
3969
3970 base = get_ssa_base_param (base, ignore_default_def);
3971 if (!base || TREE_CODE (base) != PARM_DECL)
3972 return NULL;
3973
3974 struct ipa_parm_adjustment *cand = NULL;
3975 unsigned int len = adjustments.length ();
3976 for (unsigned i = 0; i < len; i++)
3977 {
3978 struct ipa_parm_adjustment *adj = &adjustments[i];
3979
3980 if (adj->base == base
3981 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3982 {
3983 cand = adj;
3984 break;
3985 }
3986 }
3987
3988 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
3989 return NULL;
3990 return cand;
3991 }
3992
3993 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3994
3995 static bool
3996 index_in_adjustments_multiple_times_p (int base_index,
3997 ipa_parm_adjustment_vec adjustments)
3998 {
3999 int i, len = adjustments.length ();
4000 bool one = false;
4001
4002 for (i = 0; i < len; i++)
4003 {
4004 struct ipa_parm_adjustment *adj;
4005 adj = &adjustments[i];
4006
4007 if (adj->base_index == base_index)
4008 {
4009 if (one)
4010 return true;
4011 else
4012 one = true;
4013 }
4014 }
4015 return false;
4016 }
4017
4018
4019 /* Return adjustments that should have the same effect on function parameters
4020 and call arguments as if they were first changed according to adjustments in
4021 INNER and then by adjustments in OUTER. */
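
/* A worked example with hypothetical entries: if INNER is
{ copy(0), remove(1), copy(2) } and OUTER is { remove(0), copy(1) },
then TMP below holds { copy(0), copy(2) }, OUTER's base_index values
are remapped through TMP back to original indices, and the result is
{ remove(0), copy(2), remove(1) } -- only original parameter 2
survives. Offsets of two non-copy entries are added together. */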
4022
4023 ipa_parm_adjustment_vec
4024 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4025 ipa_parm_adjustment_vec outer)
4026 {
4027 int i, outlen = outer.length ();
4028 int inlen = inner.length ();
4029 int removals = 0;
4030 ipa_parm_adjustment_vec adjustments, tmp;
4031
4032 tmp.create (inlen);
4033 for (i = 0; i < inlen; i++)
4034 {
4035 struct ipa_parm_adjustment *n;
4036 n = &inner[i];
4037
4038 if (n->op == IPA_PARM_OP_REMOVE)
4039 removals++;
4040 else
4041 {
4042 /* FIXME: Handling of new arguments is not implemented yet. */
4043 gcc_assert (n->op != IPA_PARM_OP_NEW);
4044 tmp.quick_push (*n);
4045 }
4046 }
4047
4048 adjustments.create (outlen + removals);
4049 for (i = 0; i < outlen; i++)
4050 {
4051 struct ipa_parm_adjustment r;
4052 struct ipa_parm_adjustment *out = &outer[i];
4053 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4054
4055 memset (&r, 0, sizeof (r));
4056 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4057 if (out->op == IPA_PARM_OP_REMOVE)
4058 {
4059 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4060 {
4061 r.op = IPA_PARM_OP_REMOVE;
4062 adjustments.quick_push (r);
4063 }
4064 continue;
4065 }
4066 else
4067 {
4068 /* FIXME: Handling of new arguments is not implemented yet. */
4069 gcc_assert (out->op != IPA_PARM_OP_NEW);
4070 }
4071
4072 r.base_index = in->base_index;
4073 r.type = out->type;
4074
4075 /* FIXME: Create nonlocal value too. */
4076
4077 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4078 r.op = IPA_PARM_OP_COPY;
4079 else if (in->op == IPA_PARM_OP_COPY)
4080 r.offset = out->offset;
4081 else if (out->op == IPA_PARM_OP_COPY)
4082 r.offset = in->offset;
4083 else
4084 r.offset = in->offset + out->offset;
4085 adjustments.quick_push (r);
4086 }
4087
4088 for (i = 0; i < inlen; i++)
4089 {
4090 struct ipa_parm_adjustment *n = &inner[i];
4091
4092 if (n->op == IPA_PARM_OP_REMOVE)
4093 adjustments.quick_push (*n);
4094 }
4095
4096 tmp.release ();
4097 return adjustments;
4098 }
4099
4100 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4101 human-friendly way, assuming they are meant to be applied to FNDECL. */
4102
4103 void
4104 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4105 tree fndecl)
4106 {
4107 int i, len = adjustments.length ();
4108 bool first = true;
4109 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4110
4111 fprintf (file, "IPA param adjustments: ");
4112 for (i = 0; i < len; i++)
4113 {
4114 struct ipa_parm_adjustment *adj;
4115 adj = &adjustments[i];
4116
4117 if (!first)
4118 fprintf (file, " ");
4119 else
4120 first = false;
4121
4122 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4123 print_generic_expr (file, parms[adj->base_index], 0);
4124 if (adj->base)
4125 {
4126 fprintf (file, ", base: ");
4127 print_generic_expr (file, adj->base, 0);
4128 }
4129 if (adj->new_decl)
4130 {
4131 fprintf (file, ", new_decl: ");
4132 print_generic_expr (file, adj->new_decl, 0);
4133 }
4134 if (adj->new_ssa_base)
4135 {
4136 fprintf (file, ", new_ssa_base: ");
4137 print_generic_expr (file, adj->new_ssa_base, 0);
4138 }
4139
4140 if (adj->op == IPA_PARM_OP_COPY)
4141 fprintf (file, ", copy_param");
4142 else if (adj->op == IPA_PARM_OP_REMOVE)
4143 fprintf (file, ", remove_param");
4144 else
4145 fprintf (file, ", offset %li", (long) adj->offset);
4146 if (adj->by_ref)
4147 fprintf (file, ", by_ref");
4148 print_node_brief (file, ", type: ", adj->type, 0);
4149 fprintf (file, "\n");
4150 }
4151 parms.release ();
4152 }
4153
4154 /* Dump the linked list of aggregate replacement values AV to F. */
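/* Sample output with illustrative values:
" Aggregate replacements: 0[0]=4, 1[32]=7". */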
4155
4156 void
4157 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4158 {
4159 bool comma = false;
4160 fprintf (f, " Aggregate replacements:");
4161 for (; av; av = av->next)
4162 {
4163 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4164 av->index, av->offset);
4165 print_generic_expr (f, av->value, 0);
4166 comma = true;
4167 }
4168 fprintf (f, "\n");
4169 }
4170
4171 /* Stream out jump function JUMP_FUNC to OB. */
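/* The layout written below is: a uhwi type tag; a type-specific payload
(for IPA_JF_PASS_THROUGH, the operation followed either by formal_id
and a bitpack of the two preserved flags for NOP_EXPR, or by the
operand tree and formal_id otherwise); then a uhwi count of aggregate
items, a one-bit by_ref bitpack if the count is nonzero, and each item
as a uhwi offset plus its value tree. ipa_read_jump_function below
must mirror this order exactly. */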
4172
4173 static void
4174 ipa_write_jump_function (struct output_block *ob,
4175 struct ipa_jump_func *jump_func)
4176 {
4177 struct ipa_agg_jf_item *item;
4178 struct bitpack_d bp;
4179 int i, count;
4180
4181 streamer_write_uhwi (ob, jump_func->type);
4182 switch (jump_func->type)
4183 {
4184 case IPA_JF_UNKNOWN:
4185 break;
4186 case IPA_JF_KNOWN_TYPE:
4187 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4188 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4189 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4190 break;
4191 case IPA_JF_CONST:
4192 gcc_assert (
4193 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4194 stream_write_tree (ob, jump_func->value.constant.value, true);
4195 break;
4196 case IPA_JF_PASS_THROUGH:
4197 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4198 if (jump_func->value.pass_through.operation == NOP_EXPR)
4199 {
4200 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4201 bp = bitpack_create (ob->main_stream);
4202 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4203 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4204 streamer_write_bitpack (&bp);
4205 }
4206 else
4207 {
4208 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4209 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4210 }
4211 break;
4212 case IPA_JF_ANCESTOR:
4213 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4214 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4215 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4216 bp = bitpack_create (ob->main_stream);
4217 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4218 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4219 streamer_write_bitpack (&bp);
4220 break;
4221 }
4222
4223 count = vec_safe_length (jump_func->agg.items);
4224 streamer_write_uhwi (ob, count);
4225 if (count)
4226 {
4227 bp = bitpack_create (ob->main_stream);
4228 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4229 streamer_write_bitpack (&bp);
4230 }
4231
4232 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4233 {
4234 streamer_write_uhwi (ob, item->offset);
4235 stream_write_tree (ob, item->value, true);
4236 }
4237 }
4238
4239 /* Read in jump function JUMP_FUNC from IB. */
4240
4241 static void
4242 ipa_read_jump_function (struct lto_input_block *ib,
4243 struct ipa_jump_func *jump_func,
4244 struct cgraph_edge *cs,
4245 struct data_in *data_in)
4246 {
4247 enum jump_func_type jftype;
4248 enum tree_code operation;
4249 int i, count;
4250
4251 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4252 switch (jftype)
4253 {
4254 case IPA_JF_UNKNOWN:
4255 jump_func->type = IPA_JF_UNKNOWN;
4256 break;
4257 case IPA_JF_KNOWN_TYPE:
4258 {
4259 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4260 tree base_type = stream_read_tree (ib, data_in);
4261 tree component_type = stream_read_tree (ib, data_in);
4262
4263 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4264 break;
4265 }
4266 case IPA_JF_CONST:
4267 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4268 break;
4269 case IPA_JF_PASS_THROUGH:
4270 operation = (enum tree_code) streamer_read_uhwi (ib);
4271 if (operation == NOP_EXPR)
4272 {
4273 int formal_id = streamer_read_uhwi (ib);
4274 struct bitpack_d bp = streamer_read_bitpack (ib);
4275 bool agg_preserved = bp_unpack_value (&bp, 1);
4276 bool type_preserved = bp_unpack_value (&bp, 1);
4277 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4278 type_preserved);
4279 }
4280 else
4281 {
4282 tree operand = stream_read_tree (ib, data_in);
4283 int formal_id = streamer_read_uhwi (ib);
4284 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4285 operation);
4286 }
4287 break;
4288 case IPA_JF_ANCESTOR:
4289 {
4290 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4291 tree type = stream_read_tree (ib, data_in);
4292 int formal_id = streamer_read_uhwi (ib);
4293 struct bitpack_d bp = streamer_read_bitpack (ib);
4294 bool agg_preserved = bp_unpack_value (&bp, 1);
4295 bool type_preserved = bp_unpack_value (&bp, 1);
4296
4297 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4298 type_preserved);
4299 break;
4300 }
4301 }
4302
4303 count = streamer_read_uhwi (ib);
4304 vec_alloc (jump_func->agg.items, count);
4305 if (count)
4306 {
4307 struct bitpack_d bp = streamer_read_bitpack (ib);
4308 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4309 }
4310 for (i = 0; i < count; i++)
4311 {
4312 struct ipa_agg_jf_item item;
4313 item.offset = streamer_read_uhwi (ib);
4314 item.value = stream_read_tree (ib, data_in);
4315 jump_func->agg.items->quick_push (item);
4316 }
4317 }
4318
4319 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
4320 that are relevant to indirect inlining. */
4321
4322 static void
4323 ipa_write_indirect_edge_info (struct output_block *ob,
4324 struct cgraph_edge *cs)
4325 {
4326 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4327 struct bitpack_d bp;
4328
4329 streamer_write_hwi (ob, ii->param_index);
4330 streamer_write_hwi (ob, ii->offset);
4331 bp = bitpack_create (ob->main_stream);
4332 bp_pack_value (&bp, ii->polymorphic, 1);
4333 bp_pack_value (&bp, ii->agg_contents, 1);
4334 bp_pack_value (&bp, ii->member_ptr, 1);
4335 bp_pack_value (&bp, ii->by_ref, 1);
4336 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4337 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4338 streamer_write_bitpack (&bp);
4339
4340 if (ii->polymorphic)
4341 {
4342 streamer_write_hwi (ob, ii->otr_token);
4343 stream_write_tree (ob, ii->otr_type, true);
4344 stream_write_tree (ob, ii->outer_type, true);
4345 }
4346 }
4347
4348 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
4349 that are relevant to indirect inlining. */
4350
4351 static void
4352 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4353 struct data_in *data_in ATTRIBUTE_UNUSED,
4354 struct cgraph_edge *cs)
4355 {
4356 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4357 struct bitpack_d bp;
4358
4359 ii->param_index = (int) streamer_read_hwi (ib);
4360 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4361 bp = streamer_read_bitpack (ib);
4362 ii->polymorphic = bp_unpack_value (&bp, 1);
4363 ii->agg_contents = bp_unpack_value (&bp, 1);
4364 ii->member_ptr = bp_unpack_value (&bp, 1);
4365 ii->by_ref = bp_unpack_value (&bp, 1);
4366 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4367 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4368 if (ii->polymorphic)
4369 {
4370 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4371 ii->otr_type = stream_read_tree (ib, data_in);
4372 ii->outer_type = stream_read_tree (ib, data_in);
4373 }
4374 }
4375
4376 /* Stream out NODE info to OB. */
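/* The record written here is: the node's symtab encoder reference, the
parameter count, per-parameter move costs, a bitpack of the per-parameter
used flags, the controlled-uses counts, and then, for each direct and
indirect callee edge, its argument count and jump functions (indirect
edges are followed by their indirect-call info). ipa_read_node_info
must read the fields back in the same order. */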
4377
4378 static void
4379 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4380 {
4381 int node_ref;
4382 lto_symtab_encoder_t encoder;
4383 struct ipa_node_params *info = IPA_NODE_REF (node);
4384 int j;
4385 struct cgraph_edge *e;
4386 struct bitpack_d bp;
4387
4388 encoder = ob->decl_state->symtab_node_encoder;
4389 node_ref = lto_symtab_encoder_encode (encoder, node);
4390 streamer_write_uhwi (ob, node_ref);
4391
4392 streamer_write_uhwi (ob, ipa_get_param_count (info));
4393 for (j = 0; j < ipa_get_param_count (info); j++)
4394 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4395 bp = bitpack_create (ob->main_stream);
4396 gcc_assert (info->uses_analysis_done
4397 || ipa_get_param_count (info) == 0);
4398 gcc_assert (!info->node_enqueued);
4399 gcc_assert (!info->ipcp_orig_node);
4400 for (j = 0; j < ipa_get_param_count (info); j++)
4401 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4402 streamer_write_bitpack (&bp);
4403 for (j = 0; j < ipa_get_param_count (info); j++)
4404 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4405 for (e = node->callees; e; e = e->next_callee)
4406 {
4407 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4408
4409 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4410 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4411 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4412 }
4413 for (e = node->indirect_calls; e; e = e->next_callee)
4414 {
4415 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4416
4417 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4418 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4419 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4420 ipa_write_indirect_edge_info (ob, e);
4421 }
4422 }
4423
4424 /* Stream in NODE info from IB. */
4425
4426 static void
4427 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4428 struct data_in *data_in)
4429 {
4430 struct ipa_node_params *info = IPA_NODE_REF (node);
4431 int k;
4432 struct cgraph_edge *e;
4433 struct bitpack_d bp;
4434
4435 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4436
4437 for (k = 0; k < ipa_get_param_count (info); k++)
4438 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4439
4440 bp = streamer_read_bitpack (ib);
4441 if (ipa_get_param_count (info) != 0)
4442 info->uses_analysis_done = true;
4443 info->node_enqueued = false;
4444 for (k = 0; k < ipa_get_param_count (info); k++)
4445 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4446 for (k = 0; k < ipa_get_param_count (info); k++)
4447 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4448 for (e = node->callees; e; e = e->next_callee)
4449 {
4450 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4451 int count = streamer_read_uhwi (ib);
4452
4453 if (!count)
4454 continue;
4455 vec_safe_grow_cleared (args->jump_functions, count);
4456
4457 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4458 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4459 data_in);
4460 }
4461 for (e = node->indirect_calls; e; e = e->next_callee)
4462 {
4463 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4464 int count = streamer_read_uhwi (ib);
4465
4466 if (count)
4467 {
4468 vec_safe_grow_cleared (args->jump_functions, count);
4469 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4470 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4471 data_in);
4472 }
4473 ipa_read_indirect_edge_info (ib, data_in, e);
4474 }
4475 }
4476
4477 /* Write jump functions for all functions in the current LTO partition. */
4478
4479 void
4480 ipa_prop_write_jump_functions (void)
4481 {
4482 struct cgraph_node *node;
4483 struct output_block *ob;
4484 unsigned int count = 0;
4485 lto_symtab_encoder_iterator lsei;
4486 lto_symtab_encoder_t encoder;
4487
4488
4489 if (!ipa_node_params_vector.exists ())
4490 return;
4491
4492 ob = create_output_block (LTO_section_jump_functions);
4493 encoder = ob->decl_state->symtab_node_encoder;
4494 ob->cgraph_node = NULL;
4495 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4496 lsei_next_function_in_partition (&lsei))
4497 {
4498 node = lsei_cgraph_node (lsei);
4499 if (cgraph_function_with_gimple_body_p (node)
4500 && IPA_NODE_REF (node) != NULL)
4501 count++;
4502 }
4503
4504 streamer_write_uhwi (ob, count);
4505
4506 /* Process all of the functions. */
4507 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4508 lsei_next_function_in_partition (&lsei))
4509 {
4510 node = lsei_cgraph_node (lsei);
4511 if (cgraph_function_with_gimple_body_p (node)
4512 && IPA_NODE_REF (node) != NULL)
4513 ipa_write_node_info (ob, node);
4514 }
4515 streamer_write_char_stream (ob->main_stream, 0);
4516 produce_asm (ob, NULL);
4517 destroy_output_block (ob);
4518 }
4519
4520 /* Read section in file FILE_DATA of length LEN with data DATA. */
4521
4522 static void
4523 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4524 size_t len)
4525 {
4526 const struct lto_function_header *header =
4527 (const struct lto_function_header *) data;
4528 const int cfg_offset = sizeof (struct lto_function_header);
4529 const int main_offset = cfg_offset + header->cfg_size;
4530 const int string_offset = main_offset + header->main_size;
4531 struct data_in *data_in;
4532 struct lto_input_block ib_main;
4533 unsigned int i;
4534 unsigned int count;
4535
4536 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4537 header->main_size);
4538
4539 data_in =
4540 lto_data_in_create (file_data, (const char *) data + string_offset,
4541 header->string_size, vNULL);
4542 count = streamer_read_uhwi (&ib_main);
4543
4544 for (i = 0; i < count; i++)
4545 {
4546 unsigned int index;
4547 struct cgraph_node *node;
4548 lto_symtab_encoder_t encoder;
4549
4550 index = streamer_read_uhwi (&ib_main);
4551 encoder = file_data->symtab_node_encoder;
4552 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4553 gcc_assert (node->definition);
4554 ipa_read_node_info (&ib_main, node, data_in);
4555 }
4556 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4557 len);
4558 lto_data_in_delete (data_in);
4559 }
4560
4561 /* Read IPA-CP jump functions. */
4562
4563 void
4564 ipa_prop_read_jump_functions (void)
4565 {
4566 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4567 struct lto_file_decl_data *file_data;
4568 unsigned int j = 0;
4569
4570 ipa_check_create_node_params ();
4571 ipa_check_create_edge_args ();
4572 ipa_register_cgraph_hooks ();
4573
4574 while ((file_data = file_data_vec[j++]))
4575 {
4576 size_t len;
4577 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4578
4579 if (data)
4580 ipa_prop_read_section (file_data, data, len);
4581 }
4582 }
4583
4584 /* After merging units, we can get mismatches in argument counts.
4585 Decl merging might also have rendered parameter lists obsolete.
4586 Also compute called_with_variable_arg info. */
4587
4588 void
4589 ipa_update_after_lto_read (void)
4590 {
4591 ipa_check_create_node_params ();
4592 ipa_check_create_edge_args ();
4593 }
4594
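/* Stream out the aggregate value replacement chain for NODE to OB. */
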
4595 void
4596 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4597 {
4598 int node_ref;
4599 unsigned int count = 0;
4600 lto_symtab_encoder_t encoder;
4601 struct ipa_agg_replacement_value *aggvals, *av;
4602
4603 aggvals = ipa_get_agg_replacements_for_node (node);
4604 encoder = ob->decl_state->symtab_node_encoder;
4605 node_ref = lto_symtab_encoder_encode (encoder, node);
4606 streamer_write_uhwi (ob, node_ref);
4607
4608 for (av = aggvals; av; av = av->next)
4609 count++;
4610 streamer_write_uhwi (ob, count);
4611
4612 for (av = aggvals; av; av = av->next)
4613 {
4614 struct bitpack_d bp;
4615
4616 streamer_write_uhwi (ob, av->offset);
4617 streamer_write_uhwi (ob, av->index);
4618 stream_write_tree (ob, av->value, true);
4619
4620 bp = bitpack_create (ob->main_stream);
4621 bp_pack_value (&bp, av->by_ref, 1);
4622 streamer_write_bitpack (&bp);
4623 }
4624 }
4625
4626 /* Stream in the aggregate value replacement chain for NODE from IB. */
4627
4628 static void
4629 read_agg_replacement_chain (struct lto_input_block *ib,
4630 struct cgraph_node *node,
4631 struct data_in *data_in)
4632 {
4633 struct ipa_agg_replacement_value *aggvals = NULL;
4634 unsigned int count, i;
4635
4636 count = streamer_read_uhwi (ib);
4637 for (i = 0; i < count; i++)
4638 {
4639 struct ipa_agg_replacement_value *av;
4640 struct bitpack_d bp;
4641
4642 av = ggc_alloc_ipa_agg_replacement_value ();
4643 av->offset = streamer_read_uhwi (ib);
4644 av->index = streamer_read_uhwi (ib);
4645 av->value = stream_read_tree (ib, data_in);
4646 bp = streamer_read_bitpack (ib);
4647 av->by_ref = bp_unpack_value (&bp, 1);
4648 av->next = aggvals;
4649 aggvals = av;
4650 }
4651 ipa_set_node_agg_value_chain (node, aggvals);
4652 }
4653
4654 /* Write all aggregate replacements for nodes in the set. */
4655
4656 void
4657 ipa_prop_write_all_agg_replacement (void)
4658 {
4659 struct cgraph_node *node;
4660 struct output_block *ob;
4661 unsigned int count = 0;
4662 lto_symtab_encoder_iterator lsei;
4663 lto_symtab_encoder_t encoder;
4664
4665 if (!ipa_node_agg_replacements)
4666 return;
4667
4668 ob = create_output_block (LTO_section_ipcp_transform);
4669 encoder = ob->decl_state->symtab_node_encoder;
4670 ob->cgraph_node = NULL;
4671 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4672 lsei_next_function_in_partition (&lsei))
4673 {
4674 node = lsei_cgraph_node (lsei);
4675 if (cgraph_function_with_gimple_body_p (node)
4676 && ipa_get_agg_replacements_for_node (node) != NULL)
4677 count++;
4678 }
4679
4680 streamer_write_uhwi (ob, count);
4681
4682 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4683 lsei_next_function_in_partition (&lsei))
4684 {
4685 node = lsei_cgraph_node (lsei);
4686 if (cgraph_function_with_gimple_body_p (node)
4687 && ipa_get_agg_replacements_for_node (node) != NULL)
4688 write_agg_replacement_chain (ob, node);
4689 }
4690 streamer_write_char_stream (ob->main_stream, 0);
4691 produce_asm (ob, NULL);
4692 destroy_output_block (ob);
4693 }
4694
4695 /* Read replacements section in file FILE_DATA of length LEN with data
4696 DATA. */
4697
4698 static void
4699 read_replacements_section (struct lto_file_decl_data *file_data,
4700 const char *data,
4701 size_t len)
4702 {
4703 const struct lto_function_header *header =
4704 (const struct lto_function_header *) data;
4705 const int cfg_offset = sizeof (struct lto_function_header);
4706 const int main_offset = cfg_offset + header->cfg_size;
4707 const int string_offset = main_offset + header->main_size;
4708 struct data_in *data_in;
4709 struct lto_input_block ib_main;
4710 unsigned int i;
4711 unsigned int count;
4712
4713 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4714 header->main_size);
4715
4716 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4717 header->string_size, vNULL);
4718 count = streamer_read_uhwi (&ib_main);
4719
4720 for (i = 0; i < count; i++)
4721 {
4722 unsigned int index;
4723 struct cgraph_node *node;
4724 lto_symtab_encoder_t encoder;
4725
4726 index = streamer_read_uhwi (&ib_main);
4727 encoder = file_data->symtab_node_encoder;
4728 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4729 gcc_assert (node->definition);
4730 read_agg_replacement_chain (&ib_main, node, data_in);
4731 }
4732 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4733 len);
4734 lto_data_in_delete (data_in);
4735 }
4736
4737 /* Read IPA-CP aggregate replacements. */
4738
4739 void
4740 ipa_prop_read_all_agg_replacement (void)
4741 {
4742 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4743 struct lto_file_decl_data *file_data;
4744 unsigned int j = 0;
4745
4746 while ((file_data = file_data_vec[j++]))
4747 {
4748 size_t len;
4749 const char *data = lto_get_section_data (file_data,
4750 LTO_section_ipcp_transform,
4751 NULL, &len);
4752 if (data)
4753 read_replacements_section (file_data, data, len);
4754 }
4755 }
4756
4757 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4758 NODE. */
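
/* For instance (hypothetical clone): if NODE skips its original parameter 1,
replacements with index 0 keep index 0 and replacements with index 2 are
renumbered to 1, matching the clone's shorter parameter list. */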
4759
4760 static void
4761 adjust_agg_replacement_values (struct cgraph_node *node,
4762 struct ipa_agg_replacement_value *aggval)
4763 {
4764 struct ipa_agg_replacement_value *v;
4765 int i, c = 0, d = 0, *adj;
4766
4767 if (!node->clone.combined_args_to_skip)
4768 return;
4769
4770 for (v = aggval; v; v = v->next)
4771 {
4772 gcc_assert (v->index >= 0);
4773 if (c < v->index)
4774 c = v->index;
4775 }
4776 c++;
4777
4778 adj = XALLOCAVEC (int, c);
4779 for (i = 0; i < c; i++)
4780 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4781 {
4782 adj[i] = -1;
4783 d++;
4784 }
4785 else
4786 adj[i] = i - d;
4787
4788 for (v = aggval; v; v = v->next)
4789 v->index = adj[v->index];
4790 }
4791
4792
4793 /* Function body transformation phase. */
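/* Illustrative effect (hypothetical values): if IPA-CP recorded that the
aggregate passed as parameter 0 always holds 42 at a given offset, a
load such as x_1 = p_2(D)->f is rewritten below into x_1 = 42, provided
the recorded and loaded sizes match and the types are convertible. */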
4794
4795 unsigned int
4796 ipcp_transform_function (struct cgraph_node *node)
4797 {
4798 vec<ipa_param_descriptor> descriptors = vNULL;
4799 struct param_analysis_info *parms_ainfo;
4800 struct ipa_agg_replacement_value *aggval;
4801 gimple_stmt_iterator gsi;
4802 basic_block bb;
4803 int param_count;
4804 bool cfg_changed = false, something_changed = false;
4805
4806 gcc_checking_assert (cfun);
4807 gcc_checking_assert (current_function_decl);
4808
4809 if (dump_file)
4810 fprintf (dump_file, "Modification phase of node %s/%i\n",
4811 node->name (), node->order);
4812
4813 aggval = ipa_get_agg_replacements_for_node (node);
4814 if (!aggval)
4815 return 0;
4816 param_count = count_formal_params (node->decl);
4817 if (param_count == 0)
4818 return 0;
4819 adjust_agg_replacement_values (node, aggval);
4820 if (dump_file)
4821 ipa_dump_agg_replacement_values (dump_file, aggval);
4822 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4823 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4824 descriptors.safe_grow_cleared (param_count);
4825 ipa_populate_param_decls (node, descriptors);
4826
4827 FOR_EACH_BB_FN (bb, cfun)
4828 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4829 {
4830 struct ipa_agg_replacement_value *v;
4831 gimple stmt = gsi_stmt (gsi);
4832 tree rhs, val, t;
4833 HOST_WIDE_INT offset, size;
4834 int index;
4835 bool by_ref, vce;
4836
4837 if (!gimple_assign_load_p (stmt))
4838 continue;
4839 rhs = gimple_assign_rhs1 (stmt);
4840 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4841 continue;
4842
4843 vce = false;
4844 t = rhs;
4845 while (handled_component_p (t))
4846 {
4847 /* V_C_E can do things like convert an array of integers to one
4848 bigger integer and similar things that we do not handle below. */
4849 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4850 {
4851 vce = true;
4852 break;
4853 }
4854 t = TREE_OPERAND (t, 0);
4855 }
4856 if (vce)
4857 continue;
4858
4859 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4860 rhs, &index, &offset, &size, &by_ref))
4861 continue;
4862 for (v = aggval; v; v = v->next)
4863 if (v->index == index
4864 && v->offset == offset)
4865 break;
4866 if (!v
4867 || v->by_ref != by_ref
4868 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4869 continue;
4870
4871 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4872 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4873 {
4874 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4875 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4876 else if (TYPE_SIZE (TREE_TYPE (rhs))
4877 == TYPE_SIZE (TREE_TYPE (v->value)))
4878 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4879 else
4880 {
4881 if (dump_file)
4882 {
4883 fprintf (dump_file, " const ");
4884 print_generic_expr (dump_file, v->value, 0);
4885 fprintf (dump_file, " can't be converted to type of ");
4886 print_generic_expr (dump_file, rhs, 0);
4887 fprintf (dump_file, "\n");
4888 }
4889 continue;
4890 }
4891 }
4892 else
4893 val = v->value;
4894
4895 if (dump_file && (dump_flags & TDF_DETAILS))
4896 {
4897 fprintf (dump_file, "Modifying stmt:\n ");
4898 print_gimple_stmt (dump_file, stmt, 0, 0);
4899 }
4900 gimple_assign_set_rhs_from_tree (&gsi, val);
4901 update_stmt (stmt);
4902
4903 if (dump_file && (dump_flags & TDF_DETAILS))
4904 {
4905 fprintf (dump_file, "into:\n ");
4906 print_gimple_stmt (dump_file, stmt, 0, 0);
4907 fprintf (dump_file, "\n");
4908 }
4909
4910 something_changed = true;
4911 if (maybe_clean_eh_stmt (stmt)
4912 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4913 cfg_changed = true;
4914 }
4915
4916 (*ipa_node_agg_replacements)[node->uid] = NULL;
4917 free_parms_ainfo (parms_ainfo, param_count);
4918 descriptors.release ();
4919
4920 if (!something_changed)
4921 return 0;
4922 else if (cfg_changed)
4923 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4924 else
4925 return TODO_update_ssa_only_virtuals;
4926 }