/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
28 #include "tree-eh.h"
29 #include "gimple-expr.h"
30 #include "is-a.h"
31 #include "gimple.h"
32 #include "expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "ipa-prop.h"
42 #include "bitmap.h"
43 #include "gimple-ssa.h"
44 #include "tree-cfg.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
48 #include "tree-dfa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
52 #include "flags.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
58 #include "params.h"
59 #include "ipa-utils.h"
60
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
63
64 struct param_analysis_info
65 {
66 bool parm_modified, ref_modified, pt_modified;
67 bitmap parm_visited_statements, pt_visited_statements;
68 };

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
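
/* As an illustration of the predicate above (an editor's sketch, not from any
   test case), a function compiled with per-function options that disable
   optimization is skipped by IPA-CP:

     __attribute__ ((optimize ("O0")))
     int keep_unoptimized (int i)
     {
       return i + 1;
     }

   Here DECL_FUNCTION_SPECIFIC_OPTIMIZATION records x_optimize as zero, so
   ipa_func_spec_opts_forbid_analysis_p returns true for the node.  */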

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a printable representation of the formal parameter I of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
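
/* For illustration only, the output of the function above for a call passing
   a constant, an unmodified pointer parameter and an address within an
   ancestor sub-object might look roughly like this (offsets and types are
   hypothetical):

       param 0: CONST: 4
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 2: ANCESTOR: 1, offset 64, struct B

   The exact text depends on the jump functions actually computed.  */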


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i",
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
              && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
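
/* The reference descriptions attached above matter when the constant is the
   address of a function.  In a (hypothetical) fragment such as

     static void callback (void);
     ...
     register_handler (callback);

   the argument is an ADDR_EXPR of a FUNCTION_DECL, so an ipa_cst_ref_desc
   with refcount one is attached to the jump function.  The count is then
   maintained as call graph edges are cloned or removed, so that the
   reference to callback can be released when no described use remains.  */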

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
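
/* To illustrate the assumptions spelled out before the function above,
   consider this sketch of a C++ hierarchy (hypothetical, not from any test
   case):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   The body of B::B() first calls A::A(), which stores A's VMT pointer, then
   stores B's VMT pointer, and only afterwards runs user code.  Walking
   statements backwards from a virtual call in the user code therefore meets
   the store of B's VMT pointer before any ancestor constructor, which is
   where the scan can stop.  */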

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}


/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
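
/* As an example of what the function above detects, consider a (hypothetical)
   caller that ends the lifetime of an object and constructs a new one in the
   same storage before the analyzed call:

     a->~A ();         // destructor stores VMT pointers
     new (a) B;        // constructor stores B's VMT pointer
     a->foo ();        // dynamic type at this call is B

   Walking the virtual definitions before the call finds the store of B's VMT
   pointer; the jump function is then filled with the new type, or set to
   unknown when several candidate types or a nonzero offset are seen.  */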

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified before STMT, return the index of the parameter
   in ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}
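
/* For instance, if a parameter is addressable and therefore not a gimple
   register, its value is loaded from the declaration before use, as in this
   sketch of a gimple dump (hypothetical):

     f (int a)
     {
       int a.0;

       a.0_1 = a;
       g (a.0_1);
     }

   For the statement a.0_1 = a the function above returns 0, the index of a,
   provided the walk over virtual definitions shows that no earlier statement
   could have stored into a.  */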

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   the offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
                                         call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
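
/* A KNOWN_TYPE jump function is typically built when the address of an
   automatic object of a polymorphic type is passed, as in the hypothetical

     struct B b;
     do_something (&b);

   The argument is an ADDR_EXPR of a local declaration, so unless
   detect_type_change proves that the virtual table pointer may have been
   overwritten, the callee is known to receive an object of dynamic type B
   at offset zero.  */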

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
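
/* The layout matched by the function above corresponds to how a C++ pointer
   to member function is commonly represented, behaving like the following
   (hypothetical) record:

     struct
     {
       void (A::*__pfn) ();   // pointer-to-method field
       ptrdiff_t __delta;     // this-pointer adjustment
     };

   i.e. a pointer-to-method field followed by an integral delta field and
   nothing else.  */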

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
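
/* For example, given the (hypothetical) chain of copies

     b_2 = a_1;
     c_3 = b_2;

   calling the function above on c_3 yields a_1, while a default definition
   or a name defined by anything other than a single copy is returned
   unchanged.  */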

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
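
/* As an example of what the list describes, consider a (hypothetical) caller
   that fills an aggregate just before passing its address:

     struct S s;
     s.a = 1;
     s.b = 16;
     bar (&s);

   Walking backwards from the call, the stores to s.b and s.a yield a list of
   two entries ordered by offset, each with a known constant, which is later
   turned into the aggregate items of the jump function for the argument.  */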

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}

/* Return the type of the formal parameter number I of the callee of call
   graph edge E, or NULL_TREE if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             loop.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}
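
/* To summarize the dispatch in the loop above, the kind of jump function
   computed for an actual argument depends on its form roughly as follows
   (a sketch; the precise conditions are in the code):

     is_gimple_ip_invariant (arg)        -> IPA_JF_CONST
     unmodified aggregate PARM_DECL      -> simple IPA_JF_PASS_THROUGH
     SSA name, default definition        -> simple IPA_JF_PASS_THROUGH
     SSA name defined by an assignment   -> pass-through or ancestor
     SSA name defined by a PHI           -> possibly IPA_JF_ANCESTOR
     ADDR_EXPR of a local object         -> possibly IPA_JF_KNOWN_TYPE

   Independently of the above, known aggregate contents may be recorded in
   the jump function's agg field.  */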

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->definition && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below; the call is on the last line:

     <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
     f$__delta_5 = MEM[(struct *)&f];
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   and a few lines below:

     <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
1817
1818 static void
1819 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1820 struct ipa_node_params *info,
1821 struct param_analysis_info *parms_ainfo,
1822 gimple call, tree target)
1823 {
1824 gimple def;
1825 tree n1, n2;
1826 gimple d1, d2;
1827 tree rec, rec2, cond;
1828 gimple branch;
1829 int index;
1830 basic_block bb, virt_bb, join;
1831 HOST_WIDE_INT offset;
1832 bool by_ref;
1833
1834 if (SSA_NAME_IS_DEFAULT_DEF (target))
1835 {
1836 tree var = SSA_NAME_VAR (target);
1837 index = ipa_get_param_decl_index (info, var);
1838 if (index >= 0)
1839 ipa_note_param_call (node, index, call);
1840 return;
1841 }
1842
1843 def = SSA_NAME_DEF_STMT (target);
1844 if (gimple_assign_single_p (def)
1845 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1846 gimple_assign_rhs1 (def), &index, &offset,
1847 NULL, &by_ref))
1848 {
1849 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1850 if (cs->indirect_info->offset != offset)
1851 cs->indirect_info->outer_type = NULL;
1852 cs->indirect_info->offset = offset;
1853 cs->indirect_info->agg_contents = 1;
1854 cs->indirect_info->by_ref = by_ref;
1855 return;
1856 }
1857
1858 /* Now we need to try to match the complex pattern of calling a member
1859 pointer. */
1860 if (gimple_code (def) != GIMPLE_PHI
1861 || gimple_phi_num_args (def) != 2
1862 || !POINTER_TYPE_P (TREE_TYPE (target))
1863 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1864 return;
1865
1866 /* First, we need to check whether one of these is a load from a member
1867 pointer that is a parameter to this function. */
1868 n1 = PHI_ARG_DEF (def, 0);
1869 n2 = PHI_ARG_DEF (def, 1);
1870 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1871 return;
1872 d1 = SSA_NAME_DEF_STMT (n1);
1873 d2 = SSA_NAME_DEF_STMT (n2);
1874
1875 join = gimple_bb (def);
1876 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1877 {
1878 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1879 return;
1880
1881 bb = EDGE_PRED (join, 0)->src;
1882 virt_bb = gimple_bb (d2);
1883 }
1884 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1885 {
1886 bb = EDGE_PRED (join, 1)->src;
1887 virt_bb = gimple_bb (d1);
1888 }
1889 else
1890 return;
1891
1892 /* Second, we need to check that the basic blocks are laid out in the way
1893 corresponding to the pattern. */
1894
1895 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1896 || single_pred (virt_bb) != bb
1897 || single_succ (virt_bb) != join)
1898 return;
1899
1900 /* Third, let's see that the branching is done depending on the least
1901 significant bit of the pfn. */
1902
1903 branch = last_stmt (bb);
1904 if (!branch || gimple_code (branch) != GIMPLE_COND)
1905 return;
1906
1907 if ((gimple_cond_code (branch) != NE_EXPR
1908 && gimple_cond_code (branch) != EQ_EXPR)
1909 || !integer_zerop (gimple_cond_rhs (branch)))
1910 return;
1911
1912 cond = gimple_cond_lhs (branch);
1913 if (!ipa_is_ssa_with_stmt_def (cond))
1914 return;
1915
1916 def = SSA_NAME_DEF_STMT (cond);
1917 if (!is_gimple_assign (def)
1918 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1919 || !integer_onep (gimple_assign_rhs2 (def)))
1920 return;
1921
1922 cond = gimple_assign_rhs1 (def);
1923 if (!ipa_is_ssa_with_stmt_def (cond))
1924 return;
1925
1926 def = SSA_NAME_DEF_STMT (cond);
1927
1928 if (is_gimple_assign (def)
1929 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1930 {
1931 cond = gimple_assign_rhs1 (def);
1932 if (!ipa_is_ssa_with_stmt_def (cond))
1933 return;
1934 def = SSA_NAME_DEF_STMT (cond);
1935 }
1936
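/* On targets that keep the virtuality bit in the delta field rather than
   in the pfn, the load feeding the condition reads the delta field, so
   look at whichever field TARGET_PTRMEMFUNC_VBIT_LOCATION designates. */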
1937 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1938 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1939 == ptrmemfunc_vbit_in_delta),
1940 NULL);
1941 if (rec != rec2)
1942 return;
1943
1944 index = ipa_get_param_decl_index (info, rec);
1945 if (index >= 0
1946 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1947 {
1948 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1949 if (cs->indirect_info->offset != offset)
1950 cs->indirect_info->outer_type = NULL;
1951 cs->indirect_info->offset = offset;
1952 cs->indirect_info->agg_contents = 1;
1953 cs->indirect_info->member_ptr = 1;
1954 }
1955
1956 return;
1957 }
1958
1959 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1960 object referenced in the expression is a formal parameter of the caller
1961 (described by INFO), create a call note for the statement. */
1962
1963 static void
1964 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1965 struct ipa_node_params *info, gimple call,
1966 tree target)
1967 {
1968 struct cgraph_edge *cs;
1969 struct cgraph_indirect_call_info *ii;
1970 struct ipa_jump_func jfunc;
1971 tree obj = OBJ_TYPE_REF_OBJECT (target);
1972 int index;
1973 HOST_WIDE_INT anc_offset;
1974
1975 if (!flag_devirtualize)
1976 return;
1977
1978 if (TREE_CODE (obj) != SSA_NAME)
1979 return;
1980
1981 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1982 {
1983 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1984 return;
1985
1986 anc_offset = 0;
1987 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1988 gcc_assert (index >= 0);
1989 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1990 call, &jfunc))
1991 return;
1992 }
1993 else
1994 {
1995 gimple stmt = SSA_NAME_DEF_STMT (obj);
1996 tree expr;
1997
1998 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1999 if (!expr)
2000 return;
2001 index = ipa_get_param_decl_index (info,
2002 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2003 gcc_assert (index >= 0);
2004 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2005 call, &jfunc, anc_offset))
2006 return;
2007 }
2008
2009 cs = ipa_note_param_call (node, index, call);
2010 ii = cs->indirect_info;
2011 ii->offset = anc_offset;
2012 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2013 ii->otr_type = obj_type_ref_class (target);
2014 ii->polymorphic = 1;
2015 }
2016
2017 /* Analyze call statement CALL to determine whether and how it utilizes formal
2018 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a
2019 vector containing intermediate information about each formal parameter. */
2020
2021 static void
2022 ipa_analyze_call_uses (struct cgraph_node *node,
2023 struct ipa_node_params *info,
2024 struct param_analysis_info *parms_ainfo, gimple call)
2025 {
2026 tree target = gimple_call_fn (call);
2027 struct cgraph_edge *cs;
2028
2029 if (!target
2030 || (TREE_CODE (target) != SSA_NAME
2031 && !virtual_method_call_p (target)))
2032 return;
2033
2034 /* If we previously turned the call into a direct call, there is
2035 no need to analyze. */
2036 cs = cgraph_edge (node, call);
2037 if (cs && !cs->indirect_unknown_callee)
2038 return;
2039 if (TREE_CODE (target) == SSA_NAME)
2040 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
2041 else if (virtual_method_call_p (target))
2042 ipa_analyze_virtual_call_uses (node, info, call, target);
2043 }
2044
2045
2046 /* Analyze the call statement STMT with respect to formal parameters (described
2047 in INFO) of caller given by NODE. Currently it only checks whether formal
2048 parameters are called. PARMS_AINFO is a pointer to a vector containing
2049 intermediate information about each formal parameter. */
2050
2051 static void
2052 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
2053 struct param_analysis_info *parms_ainfo, gimple stmt)
2054 {
2055 if (is_gimple_call (stmt))
2056 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
2057 }
2058
2059 /* Callback of walk_stmt_load_store_addr_ops, used for the load, store and
2060 address visits. If OP is a parameter declaration, mark it as used in the
2061 info structure passed in DATA. */
2062
2063 static bool
2064 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2065 {
2066 struct ipa_node_params *info = (struct ipa_node_params *) data;
2067
2068 op = get_base_address (op);
2069 if (op
2070 && TREE_CODE (op) == PARM_DECL)
2071 {
2072 int index = ipa_get_param_decl_index (info, op);
2073 gcc_assert (index >= 0);
2074 ipa_set_param_used (info, index, true);
2075 }
2076
2077 return false;
2078 }
2079
2080 /* Scan the function body of NODE and inspect the uses of formal parameters.
2081 Store the findings in various structures of the associated ipa_node_params
2082 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2083 vector containing intermediate information about each formal parameter. */
2084
2085 static void
2086 ipa_analyze_params_uses (struct cgraph_node *node,
2087 struct param_analysis_info *parms_ainfo)
2088 {
2089 tree decl = node->decl;
2090 basic_block bb;
2091 struct function *func;
2092 gimple_stmt_iterator gsi;
2093 struct ipa_node_params *info = IPA_NODE_REF (node);
2094 int i;
2095
2096 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
2097 return;
2098
2099 info->uses_analysis_done = 1;
2100 if (ipa_func_spec_opts_forbid_analysis_p (node))
2101 {
2102 for (i = 0; i < ipa_get_param_count (info); i++)
2103 {
2104 ipa_set_param_used (info, i, true);
2105 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2106 }
2107 return;
2108 }
2109
2110 for (i = 0; i < ipa_get_param_count (info); i++)
2111 {
2112 tree parm = ipa_get_param (info, i);
2113 int controlled_uses = 0;
2114
2115 /* For SSA regs see if parameter is used. For non-SSA we compute
2116 the flag during modification analysis. */
2117 if (is_gimple_reg (parm))
2118 {
2119 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2120 parm);
2121 if (ddef && !has_zero_uses (ddef))
2122 {
2123 imm_use_iterator imm_iter;
2124 use_operand_p use_p;
2125
2126 ipa_set_param_used (info, i, true);
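/* A use in a call statement can be described by a jump function and is
   therefore counted; any other use means we cannot tell how the value of
   the parameter is used. */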
2127 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2128 if (!is_gimple_call (USE_STMT (use_p)))
2129 {
2130 controlled_uses = IPA_UNDESCRIBED_USE;
2131 break;
2132 }
2133 else
2134 controlled_uses++;
2135 }
2136 else
2137 controlled_uses = 0;
2138 }
2139 else
2140 controlled_uses = IPA_UNDESCRIBED_USE;
2141 ipa_set_controlled_uses (info, i, controlled_uses);
2142 }
2143
2144 func = DECL_STRUCT_FUNCTION (decl);
2145 FOR_EACH_BB_FN (bb, func)
2146 {
2147 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2148 {
2149 gimple stmt = gsi_stmt (gsi);
2150
2151 if (is_gimple_debug (stmt))
2152 continue;
2153
2154 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
2155 walk_stmt_load_store_addr_ops (stmt, info,
2156 visit_ref_for_mod_analysis,
2157 visit_ref_for_mod_analysis,
2158 visit_ref_for_mod_analysis);
2159 }
2160 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2161 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2162 visit_ref_for_mod_analysis,
2163 visit_ref_for_mod_analysis,
2164 visit_ref_for_mod_analysis);
2165 }
2166 }
2167
2168 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2169
2170 static void
2171 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2172 {
2173 int i;
2174
2175 for (i = 0; i < param_count; i++)
2176 {
2177 if (parms_ainfo[i].parm_visited_statements)
2178 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2179 if (parms_ainfo[i].pt_visited_statements)
2180 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2181 }
2182 }
2183
2184 /* Initialize the array describing properties of formal parameters
2185 of NODE, analyze their uses and compute jump functions associated
2186 with actual arguments of calls from within NODE. */
2187
2188 void
2189 ipa_analyze_node (struct cgraph_node *node)
2190 {
2191 struct ipa_node_params *info;
2192 struct param_analysis_info *parms_ainfo;
2193 int param_count;
2194
2195 ipa_check_create_node_params ();
2196 ipa_check_create_edge_args ();
2197 info = IPA_NODE_REF (node);
2198 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2199 ipa_initialize_node_params (node);
2200
2201 param_count = ipa_get_param_count (info);
2202 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2203 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
2204
2205 ipa_analyze_params_uses (node, parms_ainfo);
2206 ipa_compute_jump_functions (node, parms_ainfo);
2207
2208 free_parms_ainfo (parms_ainfo, param_count);
2209 pop_cfun ();
2210 }
2211
2212 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2213 attempt a type-based devirtualization. If successful, return the
2214 target function declaration, otherwise return NULL. */
2215
2216 tree
2217 ipa_intraprocedural_devirtualization (gimple call)
2218 {
2219 tree binfo, token, fndecl;
2220 struct ipa_jump_func jfunc;
2221 tree otr = gimple_call_fn (call);
2222
2223 jfunc.type = IPA_JF_UNKNOWN;
2224 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2225 call, obj_type_ref_class (otr));
2226 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2227 return NULL_TREE;
2228 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2229 if (!binfo)
2230 return NULL_TREE;
2231 token = OBJ_TYPE_REF_TOKEN (otr);
2232 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2233 binfo);
2234 #ifdef ENABLE_CHECKING
2235 if (fndecl)
2236 gcc_assert (possible_polymorphic_call_target_p
2237 (otr, cgraph_get_node (fndecl)));
2238 #endif
2239 return fndecl;
2240 }
2241
2242 /* Update the jump function DST when the call graph edge corresponding to SRC
2243 is being inlined, knowing that DST is of type ancestor and SRC of known
2244 type. */
2245
2246 static void
2247 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2248 struct ipa_jump_func *dst)
2249 {
2250 HOST_WIDE_INT combined_offset;
2251 tree combined_type;
2252
2253 if (!ipa_get_jf_ancestor_type_preserved (dst))
2254 {
2255 dst->type = IPA_JF_UNKNOWN;
2256 return;
2257 }
2258
2259 combined_offset = ipa_get_jf_known_type_offset (src)
2260 + ipa_get_jf_ancestor_offset (dst);
2261 combined_type = ipa_get_jf_ancestor_type (dst);
2262
2263 ipa_set_jf_known_type (dst, combined_offset,
2264 ipa_get_jf_known_type_base_type (src),
2265 combined_type);
2266 }
2267
2268 /* Update the jump functions associated with call graph edge E when the call
2269 graph edge CS is being inlined, assuming that E->caller is already (possibly
2270 indirectly) inlined into CS->callee and that E has not been inlined. */
2271
2272 static void
2273 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2274 struct cgraph_edge *e)
2275 {
2276 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2277 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2278 int count = ipa_get_cs_argument_count (args);
2279 int i;
2280
2281 for (i = 0; i < count; i++)
2282 {
2283 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2284
2285 if (dst->type == IPA_JF_ANCESTOR)
2286 {
2287 struct ipa_jump_func *src;
2288 int dst_fid = dst->value.ancestor.formal_id;
2289
2290 /* Variable number of arguments can cause havoc if we try to access
2291 one that does not exist in the inlined edge. So make sure we
2292 don't. */
2293 if (dst_fid >= ipa_get_cs_argument_count (top))
2294 {
2295 dst->type = IPA_JF_UNKNOWN;
2296 continue;
2297 }
2298
2299 src = ipa_get_ith_jump_func (top, dst_fid);
2300
2301 if (src->agg.items
2302 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2303 {
2304 struct ipa_agg_jf_item *item;
2305 int j;
2306
2307 /* Currently we do not produce clobber aggregate jump functions,
2308 replace with merging when we do. */
2309 gcc_assert (!dst->agg.items);
2310
2311 dst->agg.items = vec_safe_copy (src->agg.items);
2312 dst->agg.by_ref = src->agg.by_ref;
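/* The copied items describe offsets relative to the whole original
   parameter; re-base them so that they are relative to the ancestor
   part of it. */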
2313 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2314 item->offset -= dst->value.ancestor.offset;
2315 }
2316
2317 if (src->type == IPA_JF_KNOWN_TYPE)
2318 combine_known_type_and_ancestor_jfs (src, dst);
2319 else if (src->type == IPA_JF_PASS_THROUGH
2320 && src->value.pass_through.operation == NOP_EXPR)
2321 {
2322 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2323 dst->value.ancestor.agg_preserved &=
2324 src->value.pass_through.agg_preserved;
2325 dst->value.ancestor.type_preserved &=
2326 src->value.pass_through.type_preserved;
2327 }
2328 else if (src->type == IPA_JF_ANCESTOR)
2329 {
2330 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2331 dst->value.ancestor.offset += src->value.ancestor.offset;
2332 dst->value.ancestor.agg_preserved &=
2333 src->value.ancestor.agg_preserved;
2334 dst->value.ancestor.type_preserved &=
2335 src->value.ancestor.type_preserved;
2336 }
2337 else
2338 dst->type = IPA_JF_UNKNOWN;
2339 }
2340 else if (dst->type == IPA_JF_PASS_THROUGH)
2341 {
2342 struct ipa_jump_func *src;
2343 /* We must check the range because of calls with a variable number of
2344 arguments, and we cannot combine jump functions with operations. */
2345 if (dst->value.pass_through.operation == NOP_EXPR
2346 && (dst->value.pass_through.formal_id
2347 < ipa_get_cs_argument_count (top)))
2348 {
2349 int dst_fid = dst->value.pass_through.formal_id;
2350 src = ipa_get_ith_jump_func (top, dst_fid);
2351 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2352
2353 switch (src->type)
2354 {
2355 case IPA_JF_UNKNOWN:
2356 dst->type = IPA_JF_UNKNOWN;
2357 break;
2358 case IPA_JF_KNOWN_TYPE:
2359 ipa_set_jf_known_type (dst,
2360 ipa_get_jf_known_type_offset (src),
2361 ipa_get_jf_known_type_base_type (src),
2362 ipa_get_jf_known_type_base_type (src));
2363 break;
2364 case IPA_JF_CONST:
2365 ipa_set_jf_cst_copy (dst, src);
2366 break;
2367
2368 case IPA_JF_PASS_THROUGH:
2369 {
2370 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2371 enum tree_code operation;
2372 operation = ipa_get_jf_pass_through_operation (src);
2373
2374 if (operation == NOP_EXPR)
2375 {
2376 bool agg_p, type_p;
2377 agg_p = dst_agg_p
2378 && ipa_get_jf_pass_through_agg_preserved (src);
2379 type_p = ipa_get_jf_pass_through_type_preserved (src)
2380 && ipa_get_jf_pass_through_type_preserved (dst);
2381 ipa_set_jf_simple_pass_through (dst, formal_id,
2382 agg_p, type_p);
2383 }
2384 else
2385 {
2386 tree operand = ipa_get_jf_pass_through_operand (src);
2387 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2388 operation);
2389 }
2390 break;
2391 }
2392 case IPA_JF_ANCESTOR:
2393 {
2394 bool agg_p, type_p;
2395 agg_p = dst_agg_p
2396 && ipa_get_jf_ancestor_agg_preserved (src);
2397 type_p = ipa_get_jf_ancestor_type_preserved (src)
2398 && ipa_get_jf_pass_through_type_preserved (dst);
2399 ipa_set_ancestor_jf (dst,
2400 ipa_get_jf_ancestor_offset (src),
2401 ipa_get_jf_ancestor_type (src),
2402 ipa_get_jf_ancestor_formal_id (src),
2403 agg_p, type_p);
2404 break;
2405 }
2406 default:
2407 gcc_unreachable ();
2408 }
2409
2410 if (src->agg.items
2411 && (dst_agg_p || !src->agg.by_ref))
2412 {
2413 /* Currently we do not produce clobber aggregate jump
2414 functions, replace with merging when we do. */
2415 gcc_assert (!dst->agg.items);
2416
2417 dst->agg.by_ref = src->agg.by_ref;
2418 dst->agg.items = vec_safe_copy (src->agg.items);
2419 }
2420 }
2421 else
2422 dst->type = IPA_JF_UNKNOWN;
2423 }
2424 }
2425 }
2426
2427 /* If TARGET is an addr_expr of a function declaration, make it the destination
2428 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2429
2430 struct cgraph_edge *
2431 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2432 {
2433 struct cgraph_node *callee;
2434 struct inline_edge_summary *es = inline_edge_summary (ie);
2435 bool unreachable = false;
2436
2437 if (TREE_CODE (target) == ADDR_EXPR)
2438 target = TREE_OPERAND (target, 0);
2439 if (TREE_CODE (target) != FUNCTION_DECL)
2440 {
2441 target = canonicalize_constructor_val (target, NULL);
2442 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2443 {
2444 if (ie->indirect_info->member_ptr)
2445 /* Member pointer call that goes through a VMT lookup. */
2446 return NULL;
2447
2448 if (dump_file)
2449 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2450 " in %s/%i, making it unreachable.\n",
2451 ie->caller->name (), ie->caller->order);
2452 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2453 callee = cgraph_get_create_node (target);
2454 unreachable = true;
2455 }
2456 else
2457 callee = cgraph_get_node (target);
2458 }
2459 else
2460 callee = cgraph_get_node (target);
2461
2462 /* Because may-edges are not explicitly represented and the vtable may be
2463 external, we may create the first reference to the object in the unit. */
2464 if (!callee || callee->global.inlined_to)
2465 {
2466
2467 /* We had better make sure we can refer to it.
2468 In the case of static functions we are out of luck, since their bodies
2469 have already been removed. In the case of public functions we may or
2470 may not introduce the reference. */
2471 if (!canonicalize_constructor_val (target, NULL)
2472 || !TREE_PUBLIC (target))
2473 {
2474 if (dump_file)
2475 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2476 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2477 xstrdup (ie->caller->name ()),
2478 ie->caller->order,
2479 xstrdup (ie->callee->name ()),
2480 ie->callee->order);
2481 return NULL;
2482 }
2483 callee = cgraph_get_create_node (target);
2484 }
2485 ipa_check_create_node_params ();
2486
2487 /* We cannot make edges to inline clones. It is a bug that someone removed
2488 the cgraph node too early. */
2489 gcc_assert (!callee->global.inlined_to);
2490
2491 if (dump_file && !unreachable)
2492 {
2493 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2494 "(%s/%i -> %s/%i), for stmt ",
2495 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2496 xstrdup (ie->caller->name ()),
2497 ie->caller->order,
2498 xstrdup (callee->name ()),
2499 callee->order);
2500 if (ie->call_stmt)
2501 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2502 else
2503 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2504 }
2505 ie = cgraph_make_edge_direct (ie, callee);
2506 es = inline_edge_summary (ie);
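/* A direct call is cheaper than an indirect one; adjust the size and time
   estimates of the call statement accordingly. */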
2507 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2508 - eni_size_weights.call_cost);
2509 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2510 - eni_time_weights.call_cost);
2511
2512 return ie;
2513 }
2514
2515 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2516 return NULL if there is none. BY_REF specifies whether the value has to
2517 be passed by reference or by value. */
2518
2519 tree
2520 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2521 HOST_WIDE_INT offset, bool by_ref)
2522 {
2523 struct ipa_agg_jf_item *item;
2524 int i;
2525
2526 if (by_ref != agg->by_ref)
2527 return NULL;
2528
2529 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2530 if (item->offset == offset)
2531 {
2532 /* Currently we do not have clobber values, return NULL for them once
2533 we do. */
2534 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2535 return item->value;
2536 }
2537 return NULL;
2538 }
2539
2540 /* Remove a reference to SYMBOL from the list of references of a node given by
2541 reference description RDESC. Return true if the reference has been
2542 successfully found and removed. */
2543
2544 static bool
2545 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2546 {
2547 struct ipa_ref *to_del;
2548 struct cgraph_edge *origin;
2549
2550 origin = rdesc->cs;
2551 if (!origin)
2552 return false;
2553 to_del = ipa_find_reference (origin->caller, symbol,
2554 origin->call_stmt, origin->lto_stmt_uid);
2555 if (!to_del)
2556 return false;
2557
2558 ipa_remove_reference (to_del);
2559 if (dump_file)
2560 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2561 xstrdup (origin->caller->name ()),
2562 origin->caller->order, xstrdup (symbol->name ()));
2563 return true;
2564 }
2565
2566 /* If JFUNC has a reference description with refcount different from
2567 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2568 NULL. JFUNC must be a constant jump function. */
2569
2570 static struct ipa_cst_ref_desc *
2571 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2572 {
2573 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2574 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2575 return rdesc;
2576 else
2577 return NULL;
2578 }
2579
2580 /* If the value of constant jump function JFUNC is an address of a function
2581 declaration, return the associated call graph node. Otherwise return
2582 NULL. */
2583
2584 static cgraph_node *
2585 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2586 {
2587 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2588 tree cst = ipa_get_jf_constant (jfunc);
2589 if (TREE_CODE (cst) != ADDR_EXPR
2590 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2591 return NULL;
2592
2593 return cgraph_get_node (TREE_OPERAND (cst, 0));
2594 }
2595
2596
2597 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2598 refcount and if it hits zero, remove the reference to the symbol it describes
2599 from the caller of the edge specified in the rdesc. Return false if either
2600 the symbol or the reference could not be found, otherwise return true. */
2601
2602 static bool
2603 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2604 {
2605 struct ipa_cst_ref_desc *rdesc;
2606 if (jfunc->type == IPA_JF_CONST
2607 && (rdesc = jfunc_rdesc_usable (jfunc))
2608 && --rdesc->refcount == 0)
2609 {
2610 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2611 if (!symbol)
2612 return false;
2613
2614 return remove_described_reference (symbol, rdesc);
2615 }
2616 return true;
2617 }
2618
2619 /* Try to find a destination for indirect edge IE that corresponds to a simple
2620 call or a call of a member function pointer and where the destination is a
2621 pointer formal parameter described by jump function JFUNC. If it can be
2622 determined, return the newly direct edge, otherwise return NULL.
2623 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2624
2625 static struct cgraph_edge *
2626 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2627 struct ipa_jump_func *jfunc,
2628 struct ipa_node_params *new_root_info)
2629 {
2630 struct cgraph_edge *cs;
2631 tree target;
2632 bool agg_contents = ie->indirect_info->agg_contents;
2633
2634 if (ie->indirect_info->agg_contents)
2635 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2636 ie->indirect_info->offset,
2637 ie->indirect_info->by_ref);
2638 else
2639 target = ipa_value_from_jfunc (new_root_info, jfunc);
2640 if (!target)
2641 return NULL;
2642 cs = ipa_make_edge_direct_to_target (ie, target);
2643
2644 if (cs && !agg_contents)
2645 {
2646 bool ok;
2647 gcc_checking_assert (cs->callee
2648 && (cs != ie
2649 || jfunc->type != IPA_JF_CONST
2650 || !cgraph_node_for_jfunc (jfunc)
2651 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2652 ok = try_decrement_rdesc_refcount (jfunc);
2653 gcc_checking_assert (ok);
2654 }
2655
2656 return cs;
2657 }
2658
2659 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2660 call based on a formal parameter which is described by jump function JFUNC
2661 and if it can be determined, make it direct and return the direct edge.
2662 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2663 are relative to. */
2664
2665 static struct cgraph_edge *
2666 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2667 struct ipa_jump_func *jfunc,
2668 struct ipa_node_params *new_root_info)
2669 {
2670 tree binfo, target;
2671
2672 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2673
2674 if (!binfo)
2675 return NULL;
2676
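/* The known value may also be an invariant such as the address of an
   object with a known type rather than a BINFO; try to extract a BINFO
   from such a constant. */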
2677 if (TREE_CODE (binfo) != TREE_BINFO)
2678 {
2679 binfo = gimple_extract_devirt_binfo_from_cst
2680 (binfo, ie->indirect_info->otr_type);
2681 if (!binfo)
2682 return NULL;
2683 }
2684
2685 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2686 ie->indirect_info->otr_type);
2687 if (binfo)
2688 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2689 binfo);
2690 else
2691 return NULL;
2692
2693 if (target)
2694 {
2695 #ifdef ENABLE_CHECKING
2696 gcc_assert (possible_polymorphic_call_target_p
2697 (ie, cgraph_get_node (target)));
2698 #endif
2699 return ipa_make_edge_direct_to_target (ie, target);
2700 }
2701 else
2702 return NULL;
2703 }
2704
2705 /* Update the param called notes associated with NODE when CS is being inlined,
2706 assuming NODE is (potentially indirectly) inlined into CS->callee.
2707 Moreover, if the callee is discovered to be constant, create a new cgraph
2708 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2709 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2710
2711 static bool
2712 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2713 struct cgraph_node *node,
2714 vec<cgraph_edge_p> *new_edges)
2715 {
2716 struct ipa_edge_args *top;
2717 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2718 struct ipa_node_params *new_root_info;
2719 bool res = false;
2720
2721 ipa_check_create_edge_args ();
2722 top = IPA_EDGE_REF (cs);
2723 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2724 ? cs->caller->global.inlined_to
2725 : cs->caller);
2726
2727 for (ie = node->indirect_calls; ie; ie = next_ie)
2728 {
2729 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2730 struct ipa_jump_func *jfunc;
2731 int param_index;
2732
2733 next_ie = ie->next_callee;
2734
2735 if (ici->param_index == -1)
2736 continue;
2737
2738 /* We must check range due to calls with variable number of arguments: */
2739 if (ici->param_index >= ipa_get_cs_argument_count (top))
2740 {
2741 ici->param_index = -1;
2742 continue;
2743 }
2744
2745 param_index = ici->param_index;
2746 jfunc = ipa_get_ith_jump_func (top, param_index);
2747
2748 if (!flag_indirect_inlining)
2749 new_direct_edge = NULL;
2750 else if (ici->polymorphic)
2751 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2752 new_root_info);
2753 else
2754 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2755 new_root_info);
2756 /* If speculation was removed, then we need to do nothing. */
2757 if (new_direct_edge && new_direct_edge != ie)
2758 {
2759 new_direct_edge->indirect_inlining_edge = 1;
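/* Making the edge direct may have reallocated the vector holding
   ipa_edge_args, so re-fetch TOP. */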
2760 top = IPA_EDGE_REF (cs);
2761 res = true;
2762 }
2763 else if (new_direct_edge)
2764 {
2765 new_direct_edge->indirect_inlining_edge = 1;
2766 if (new_direct_edge->call_stmt)
2767 new_direct_edge->call_stmt_cannot_inline_p
2768 = !gimple_check_call_matching_types (
2769 new_direct_edge->call_stmt,
2770 new_direct_edge->callee->decl, false);
2771 if (new_edges)
2772 {
2773 new_edges->safe_push (new_direct_edge);
2774 res = true;
2775 }
2776 top = IPA_EDGE_REF (cs);
2777 }
2778 else if (jfunc->type == IPA_JF_PASS_THROUGH
2779 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2780 {
2781 if (ici->agg_contents
2782 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2783 ici->param_index = -1;
2784 else
2785 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2786 }
2787 else if (jfunc->type == IPA_JF_ANCESTOR)
2788 {
2789 if (ici->agg_contents
2790 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2791 ici->param_index = -1;
2792 else
2793 {
2794 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2795 if (ipa_get_jf_ancestor_offset (jfunc))
2796 ici->outer_type = NULL;
2797 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2798 }
2799 }
2800 else
2801 /* Either we can find a destination for this edge now or never. */
2802 ici->param_index = -1;
2803 }
2804
2805 return res;
2806 }
2807
2808 /* Recursively traverse subtree of NODE (including node) made of inlined
2809 cgraph_edges when CS has been inlined and invoke
2810 update_indirect_edges_after_inlining on all nodes and
2811 update_jump_functions_after_inlining on all non-inlined edges that lead out
2812 of this subtree. Newly discovered indirect edges will be added to
2813 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2814 created. */
2815
2816 static bool
2817 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2818 struct cgraph_node *node,
2819 vec<cgraph_edge_p> *new_edges)
2820 {
2821 struct cgraph_edge *e;
2822 bool res;
2823
2824 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2825
2826 for (e = node->callees; e; e = e->next_callee)
2827 if (!e->inline_failed)
2828 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2829 else
2830 update_jump_functions_after_inlining (cs, e);
2831 for (e = node->indirect_calls; e; e = e->next_callee)
2832 update_jump_functions_after_inlining (cs, e);
2833
2834 return res;
2835 }
2836
2837 /* Combine two controlled uses counts as done during inlining. */
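/* For example: the call being inlined accounted for one of the caller's C
   controlled uses, and after inlining the callee's D uses of the parameter
   become uses in the caller, so C + D - 1 controlled uses remain. */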
2838
2839 static int
2840 combine_controlled_uses_counters (int c, int d)
2841 {
2842 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2843 return IPA_UNDESCRIBED_USE;
2844 else
2845 return c + d - 1;
2846 }
2847
2848 /* Propagate the number of controlled uses from CS->callee to the new root of
2849 the tree of inlined nodes. */
2850
2851 static void
2852 propagate_controlled_uses (struct cgraph_edge *cs)
2853 {
2854 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2855 struct cgraph_node *new_root = cs->caller->global.inlined_to
2856 ? cs->caller->global.inlined_to : cs->caller;
2857 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2858 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2859 int count, i;
2860
2861 count = MIN (ipa_get_cs_argument_count (args),
2862 ipa_get_param_count (old_root_info));
2863 for (i = 0; i < count; i++)
2864 {
2865 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2866 struct ipa_cst_ref_desc *rdesc;
2867
2868 if (jf->type == IPA_JF_PASS_THROUGH)
2869 {
2870 int src_idx, c, d;
2871 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2872 c = ipa_get_controlled_uses (new_root_info, src_idx);
2873 d = ipa_get_controlled_uses (old_root_info, i);
2874
2875 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2876 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2877 c = combine_controlled_uses_counters (c, d);
2878 ipa_set_controlled_uses (new_root_info, src_idx, c);
2879 if (c == 0 && new_root_info->ipcp_orig_node)
2880 {
2881 struct cgraph_node *n;
2882 struct ipa_ref *ref;
2883 tree t = new_root_info->known_vals[src_idx];
2884
2885 if (t && TREE_CODE (t) == ADDR_EXPR
2886 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2887 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2888 && (ref = ipa_find_reference (new_root,
2889 n, NULL, 0)))
2890 {
2891 if (dump_file)
2892 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2893 "reference from %s/%i to %s/%i.\n",
2894 xstrdup (new_root->name ()),
2895 new_root->order,
2896 xstrdup (n->name ()), n->order);
2897 ipa_remove_reference (ref);
2898 }
2899 }
2900 }
2901 else if (jf->type == IPA_JF_CONST
2902 && (rdesc = jfunc_rdesc_usable (jf)))
2903 {
2904 int d = ipa_get_controlled_uses (old_root_info, i);
2905 int c = rdesc->refcount;
2906 rdesc->refcount = combine_controlled_uses_counters (c, d);
2907 if (rdesc->refcount == 0)
2908 {
2909 tree cst = ipa_get_jf_constant (jf);
2910 struct cgraph_node *n;
2911 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2912 && TREE_CODE (TREE_OPERAND (cst, 0))
2913 == FUNCTION_DECL);
2914 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2915 if (n)
2916 {
2917 struct cgraph_node *clone;
2918 bool ok;
2919 ok = remove_described_reference (n, rdesc);
2920 gcc_checking_assert (ok);
2921
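/* Walk up the chain of inline clones created by IPA-CP, removing the
   cloning-created references on the way, until we reach the caller of
   the edge that originally took the reference. */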
2922 clone = cs->caller;
2923 while (clone->global.inlined_to
2924 && clone != rdesc->cs->caller
2925 && IPA_NODE_REF (clone)->ipcp_orig_node)
2926 {
2927 struct ipa_ref *ref;
2928 ref = ipa_find_reference (clone,
2929 n, NULL, 0);
2930 if (ref)
2931 {
2932 if (dump_file)
2933 fprintf (dump_file, "ipa-prop: Removing "
2934 "cloning-created reference "
2935 "from %s/%i to %s/%i.\n",
2936 xstrdup (clone->name ()),
2937 clone->order,
2938 xstrdup (n->name ()),
2939 n->order);
2940 ipa_remove_reference (ref);
2941 }
2942 clone = clone->callers->caller;
2943 }
2944 }
2945 }
2946 }
2947 }
2948
2949 for (i = ipa_get_param_count (old_root_info);
2950 i < ipa_get_cs_argument_count (args);
2951 i++)
2952 {
2953 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2954
2955 if (jf->type == IPA_JF_CONST)
2956 {
2957 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2958 if (rdesc)
2959 rdesc->refcount = IPA_UNDESCRIBED_USE;
2960 }
2961 else if (jf->type == IPA_JF_PASS_THROUGH)
2962 ipa_set_controlled_uses (new_root_info,
2963 jf->value.pass_through.formal_id,
2964 IPA_UNDESCRIBED_USE);
2965 }
2966 }
2967
2968 /* Update jump functions and call note functions on inlining the call site CS.
2969 CS is expected to lead to a node already cloned by
2970 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2971 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2972 created. */
2973
2974 bool
2975 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2976 vec<cgraph_edge_p> *new_edges)
2977 {
2978 bool changed;
2979 /* Do nothing if the preparation phase has not been carried out yet
2980 (i.e. during early inlining). */
2981 if (!ipa_node_params_vector.exists ())
2982 return false;
2983 gcc_assert (ipa_edge_args_vector);
2984
2985 propagate_controlled_uses (cs);
2986 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2987
2988 return changed;
2989 }
2990
2991 /* Frees all dynamically allocated structures that the argument info points
2992 to. */
2993
2994 void
2995 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2996 {
2997 vec_free (args->jump_functions);
2998 memset (args, 0, sizeof (*args));
2999 }
3000
3001 /* Free all ipa_edge structures. */
3002
3003 void
3004 ipa_free_all_edge_args (void)
3005 {
3006 int i;
3007 struct ipa_edge_args *args;
3008
3009 if (!ipa_edge_args_vector)
3010 return;
3011
3012 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3013 ipa_free_edge_args_substructures (args);
3014
3015 vec_free (ipa_edge_args_vector);
3016 }
3017
3018 /* Frees all dynamically allocated structures that the param info points
3019 to. */
3020
3021 void
3022 ipa_free_node_params_substructures (struct ipa_node_params *info)
3023 {
3024 info->descriptors.release ();
3025 free (info->lattices);
3026 /* Lattice values and their sources are deallocated with their allocation
3027 pool. */
3028 info->known_vals.release ();
3029 memset (info, 0, sizeof (*info));
3030 }
3031
3032 /* Free all ipa_node_params structures. */
3033
3034 void
3035 ipa_free_all_node_params (void)
3036 {
3037 int i;
3038 struct ipa_node_params *info;
3039
3040 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3041 ipa_free_node_params_substructures (info);
3042
3043 ipa_node_params_vector.release ();
3044 }
3045
3046 /* Set the aggregate replacements of NODE to be AGGVALS. */
3047
3048 void
3049 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3050 struct ipa_agg_replacement_value *aggvals)
3051 {
3052 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3053 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3054
3055 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3056 }
3057
3058 /* Hook that is called by cgraph.c when an edge is removed. */
3059
3060 static void
3061 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3062 {
3063 struct ipa_edge_args *args;
3064
3065 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3066 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3067 return;
3068
3069 args = IPA_EDGE_REF (cs);
3070 if (args->jump_functions)
3071 {
3072 struct ipa_jump_func *jf;
3073 int i;
3074 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3075 {
3076 struct ipa_cst_ref_desc *rdesc;
3077 try_decrement_rdesc_refcount (jf);
3078 if (jf->type == IPA_JF_CONST
3079 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3080 && rdesc->cs == cs)
3081 rdesc->cs = NULL;
3082 }
3083 }
3084
3085 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3086 }
3087
3088 /* Hook that is called by cgraph.c when a node is removed. */
3089
3090 static void
3091 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3092 {
3093 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3094 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3095 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3096 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3097 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3098 }
3099
3100 /* Hook that is called by cgraph.c when an edge is duplicated. */
3101
3102 static void
3103 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3104 __attribute__((unused)) void *data)
3105 {
3106 struct ipa_edge_args *old_args, *new_args;
3107 unsigned int i;
3108
3109 ipa_check_create_edge_args ();
3110
3111 old_args = IPA_EDGE_REF (src);
3112 new_args = IPA_EDGE_REF (dst);
3113
3114 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3115
3116 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3117 {
3118 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3119 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3120
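/* After the shallow copy of the jump functions above, DST_JF still shares
   its vector of aggregate items with SRC_JF; give it a copy of its own. */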
3121 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3122
3123 if (src_jf->type == IPA_JF_CONST)
3124 {
3125 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3126
3127 if (!src_rdesc)
3128 dst_jf->value.constant.rdesc = NULL;
3129 else if (src->caller == dst->caller)
3130 {
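/* The edge is duplicated within the same caller, which happens e.g. when
   an edge is turned into a speculative one; clone the reference too and
   create a fresh reference description for the new edge. */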
3131 struct ipa_ref *ref;
3132 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3133 gcc_checking_assert (n);
3134 ref = ipa_find_reference (src->caller, n,
3135 src->call_stmt, src->lto_stmt_uid);
3136 gcc_checking_assert (ref);
3137 ipa_clone_ref (ref, dst->caller, ref->stmt);
3138
3139 gcc_checking_assert (ipa_refdesc_pool);
3140 struct ipa_cst_ref_desc *dst_rdesc
3141 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3142 dst_rdesc->cs = dst;
3143 dst_rdesc->refcount = src_rdesc->refcount;
3144 dst_rdesc->next_duplicate = NULL;
3145 dst_jf->value.constant.rdesc = dst_rdesc;
3146 }
3147 else if (src_rdesc->cs == src)
3148 {
3149 struct ipa_cst_ref_desc *dst_rdesc;
3150 gcc_checking_assert (ipa_refdesc_pool);
3151 dst_rdesc
3152 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3153 dst_rdesc->cs = dst;
3154 dst_rdesc->refcount = src_rdesc->refcount;
3155 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3156 src_rdesc->next_duplicate = dst_rdesc;
3157 dst_jf->value.constant.rdesc = dst_rdesc;
3158 }
3159 else
3160 {
3161 struct ipa_cst_ref_desc *dst_rdesc;
3162 /* This can happen during inlining, when a JFUNC can refer to a
3163 reference taken in a function up in the tree of inline clones.
3164 We need to find the duplicate that refers to our tree of
3165 inline clones. */
3166
3167 gcc_assert (dst->caller->global.inlined_to);
3168 for (dst_rdesc = src_rdesc->next_duplicate;
3169 dst_rdesc;
3170 dst_rdesc = dst_rdesc->next_duplicate)
3171 {
3172 struct cgraph_node *top;
3173 top = dst_rdesc->cs->caller->global.inlined_to
3174 ? dst_rdesc->cs->caller->global.inlined_to
3175 : dst_rdesc->cs->caller;
3176 if (dst->caller->global.inlined_to == top)
3177 break;
3178 }
3179 gcc_assert (dst_rdesc);
3180 dst_jf->value.constant.rdesc = dst_rdesc;
3181 }
3182 }
3183 }
3184 }
3185
3186 /* Hook that is called by cgraph.c when a node is duplicated. */
3187
3188 static void
3189 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3190 ATTRIBUTE_UNUSED void *data)
3191 {
3192 struct ipa_node_params *old_info, *new_info;
3193 struct ipa_agg_replacement_value *old_av, *new_av;
3194
3195 ipa_check_create_node_params ();
3196 old_info = IPA_NODE_REF (src);
3197 new_info = IPA_NODE_REF (dst);
3198
3199 new_info->descriptors = old_info->descriptors.copy ();
3200 new_info->lattices = NULL;
3201 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3202
3203 new_info->uses_analysis_done = old_info->uses_analysis_done;
3204 new_info->node_enqueued = old_info->node_enqueued;
3205
3206 old_av = ipa_get_agg_replacements_for_node (src);
3207 if (!old_av)
3208 return;
3209
3210 new_av = NULL;
3211 while (old_av)
3212 {
3213 struct ipa_agg_replacement_value *v;
3214
3215 v = ggc_alloc_ipa_agg_replacement_value ();
3216 memcpy (v, old_av, sizeof (*v));
3217 v->next = new_av;
3218 new_av = v;
3219 old_av = old_av->next;
3220 }
3221 ipa_set_node_agg_value_chain (dst, new_av);
3222 }
3223
3224
3225 /* Analyze a function newly added into the callgraph. */
3226
3227 static void
3228 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3229 {
3230 if (cgraph_function_with_gimple_body_p (node))
3231 ipa_analyze_node (node);
3232 }
3233
3234 /* Register our cgraph hooks if they are not already there. */
3235
3236 void
3237 ipa_register_cgraph_hooks (void)
3238 {
3239 if (!edge_removal_hook_holder)
3240 edge_removal_hook_holder =
3241 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3242 if (!node_removal_hook_holder)
3243 node_removal_hook_holder =
3244 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3245 if (!edge_duplication_hook_holder)
3246 edge_duplication_hook_holder =
3247 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3248 if (!node_duplication_hook_holder)
3249 node_duplication_hook_holder =
3250 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3251 function_insertion_hook_holder =
3252 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3253 }
3254
3255 /* Unregister our cgraph hooks. */
3256
3257 static void
3258 ipa_unregister_cgraph_hooks (void)
3259 {
3260 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3261 edge_removal_hook_holder = NULL;
3262 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3263 node_removal_hook_holder = NULL;
3264 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3265 edge_duplication_hook_holder = NULL;
3266 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3267 node_duplication_hook_holder = NULL;
3268 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3269 function_insertion_hook_holder = NULL;
3270 }
3271
3272 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3273 longer needed after ipa-cp. */
3274
3275 void
3276 ipa_free_all_structures_after_ipa_cp (void)
3277 {
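/* When optimizing, the structures are still needed by the indirect
   inlining performed by the inliner and are released afterwards by
   ipa_free_all_structures_after_iinln; only free them here when not
   optimizing. */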
3278 if (!optimize)
3279 {
3280 ipa_free_all_edge_args ();
3281 ipa_free_all_node_params ();
3282 free_alloc_pool (ipcp_sources_pool);
3283 free_alloc_pool (ipcp_values_pool);
3284 free_alloc_pool (ipcp_agg_lattice_pool);
3285 ipa_unregister_cgraph_hooks ();
3286 if (ipa_refdesc_pool)
3287 free_alloc_pool (ipa_refdesc_pool);
3288 }
3289 }
3290
3291 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3292 longer needed after indirect inlining. */
3293
3294 void
3295 ipa_free_all_structures_after_iinln (void)
3296 {
3297 ipa_free_all_edge_args ();
3298 ipa_free_all_node_params ();
3299 ipa_unregister_cgraph_hooks ();
3300 if (ipcp_sources_pool)
3301 free_alloc_pool (ipcp_sources_pool);
3302 if (ipcp_values_pool)
3303 free_alloc_pool (ipcp_values_pool);
3304 if (ipcp_agg_lattice_pool)
3305 free_alloc_pool (ipcp_agg_lattice_pool);
3306 if (ipa_refdesc_pool)
3307 free_alloc_pool (ipa_refdesc_pool);
3308 }
3309
3310 /* Print the ipa_tree_map data structures of function NODE to F. */
3312
3313 void
3314 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3315 {
3316 int i, count;
3317 struct ipa_node_params *info;
3318
3319 if (!node->definition)
3320 return;
3321 info = IPA_NODE_REF (node);
3322 fprintf (f, " function %s/%i parameter descriptors:\n",
3323 node->name (), node->order);
3324 count = ipa_get_param_count (info);
3325 for (i = 0; i < count; i++)
3326 {
3327 int c;
3328
3329 ipa_dump_param (f, info, i);
3330 if (ipa_is_param_used (info, i))
3331 fprintf (f, " used");
3332 c = ipa_get_controlled_uses (info, i);
3333 if (c == IPA_UNDESCRIBED_USE)
3334 fprintf (f, " undescribed_use");
3335 else
3336 fprintf (f, " controlled_uses=%i", c);
3337 fprintf (f, "\n");
3338 }
3339 }
3340
3341 /* Print ipa_tree_map data structures of all functions in the
3342 callgraph to F. */
3343
3344 void
3345 ipa_print_all_params (FILE * f)
3346 {
3347 struct cgraph_node *node;
3348
3349 fprintf (f, "\nFunction parameters:\n");
3350 FOR_EACH_FUNCTION (node)
3351 ipa_print_node_params (f, node);
3352 }
3353
3354 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3355
3356 vec<tree>
3357 ipa_get_vector_of_formal_parms (tree fndecl)
3358 {
3359 vec<tree> args;
3360 int count;
3361 tree parm;
3362
3363 gcc_assert (!flag_wpa);
3364 count = count_formal_params (fndecl);
3365 args.create (count);
3366 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3367 args.quick_push (parm);
3368
3369 return args;
3370 }
3371
3372 /* Return a heap allocated vector containing types of formal parameters of
3373 function type FNTYPE. */
3374
3375 vec<tree>
3376 ipa_get_vector_of_formal_parm_types (tree fntype)
3377 {
3378 vec<tree> types;
3379 int count = 0;
3380 tree t;
3381
3382 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3383 count++;
3384
3385 types.create (count);
3386 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3387 types.quick_push (TREE_VALUE (t));
3388
3389 return types;
3390 }
3391
3392 /* Modify the function declaration FNDECL and its type according to the plan in
3393 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3394 to reflect the actual parameters being modified, which are determined by the
3395 base_index field. */
3396
3397 void
3398 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3399 {
3400 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3401 tree orig_type = TREE_TYPE (fndecl);
3402 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3403
3404 /* The following test is an ugly hack; some functions simply don't have any
3405 arguments in their type. This is probably a bug but well... */
3406 bool care_for_types = (old_arg_types != NULL_TREE);
3407 bool last_parm_void;
3408 vec<tree> otypes;
3409 if (care_for_types)
3410 {
3411 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3412 == void_type_node);
3413 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3414 if (last_parm_void)
3415 gcc_assert (oparms.length () + 1 == otypes.length ());
3416 else
3417 gcc_assert (oparms.length () == otypes.length ());
3418 }
3419 else
3420 {
3421 last_parm_void = false;
3422 otypes.create (0);
3423 }
3424
3425 int len = adjustments.length ();
3426 tree *link = &DECL_ARGUMENTS (fndecl);
3427 tree new_arg_types = NULL;
3428 for (int i = 0; i < len; i++)
3429 {
3430 struct ipa_parm_adjustment *adj;
3431 gcc_assert (link);
3432
3433 adj = &adjustments[i];
3434 tree parm;
3435 if (adj->op == IPA_PARM_OP_NEW)
3436 parm = NULL;
3437 else
3438 parm = oparms[adj->base_index];
3439 adj->base = parm;
3440
3441 if (adj->op == IPA_PARM_OP_COPY)
3442 {
3443 if (care_for_types)
3444 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3445 new_arg_types);
3446 *link = parm;
3447 link = &DECL_CHAIN (parm);
3448 }
3449 else if (adj->op != IPA_PARM_OP_REMOVE)
3450 {
3451 tree new_parm;
3452 tree ptype;
3453
3454 if (adj->by_ref)
3455 ptype = build_pointer_type (adj->type);
3456 else
3457 {
3458 ptype = adj->type;
3459 if (is_gimple_reg_type (ptype))
3460 {
3461 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3462 if (TYPE_ALIGN (ptype) < malign)
3463 ptype = build_aligned_type (ptype, malign);
3464 }
3465 }
3466
3467 if (care_for_types)
3468 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3469
3470 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3471 ptype);
3472 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3473 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3474 DECL_ARTIFICIAL (new_parm) = 1;
3475 DECL_ARG_TYPE (new_parm) = ptype;
3476 DECL_CONTEXT (new_parm) = fndecl;
3477 TREE_USED (new_parm) = 1;
3478 DECL_IGNORED_P (new_parm) = 1;
3479 layout_decl (new_parm, 0);
3480
3481 if (adj->op == IPA_PARM_OP_NEW)
3482 adj->base = NULL;
3483 else
3484 adj->base = parm;
3485 adj->new_decl = new_parm;
3486
3487 *link = new_parm;
3488 link = &DECL_CHAIN (new_parm);
3489 }
3490 }
3491
3492 *link = NULL_TREE;
3493
3494 tree new_reversed = NULL;
3495 if (care_for_types)
3496 {
3497 new_reversed = nreverse (new_arg_types);
3498 if (last_parm_void)
3499 {
3500 if (new_reversed)
3501 TREE_CHAIN (new_arg_types) = void_list_node;
3502 else
3503 new_reversed = void_list_node;
3504 }
3505 }
3506
3507 /* Use copy_node to preserve as much as possible from the original type
3508 (debug info, attribute lists etc.).
3509 The exception is METHOD_TYPEs, which must have a THIS argument; when we
3510 are asked to remove it, we need to build a new FUNCTION_TYPE instead. */
3512 tree new_type = NULL;
3513 if (TREE_CODE (orig_type) != METHOD_TYPE
3514 || (adjustments[0].op == IPA_PARM_OP_COPY
3515 && adjustments[0].base_index == 0))
3516 {
3517 new_type = build_distinct_type_copy (orig_type);
3518 TYPE_ARG_TYPES (new_type) = new_reversed;
3519 }
3520 else
3521 {
3522 new_type
3523 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3524 new_reversed));
3525 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3526 DECL_VINDEX (fndecl) = NULL_TREE;
3527 }
3528
3529 /* When signature changes, we need to clear builtin info. */
3530 if (DECL_BUILT_IN (fndecl))
3531 {
3532 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3533 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3534 }
3535
3536 /* This is a new type, not a copy of an old type. Need to reassociate
3537 variants. We can handle everything except the main variant lazily. */
3538 tree t = TYPE_MAIN_VARIANT (orig_type);
3539 if (orig_type != t)
3540 {
3541 TYPE_MAIN_VARIANT (new_type) = t;
3542 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3543 TYPE_NEXT_VARIANT (t) = new_type;
3544 }
3545 else
3546 {
3547 TYPE_MAIN_VARIANT (new_type) = new_type;
3548 TYPE_NEXT_VARIANT (new_type) = NULL;
3549 }
3550
3551 TREE_TYPE (fndecl) = new_type;
3552 DECL_VIRTUAL_P (fndecl) = 0;
3553 otypes.release ();
3554 oparms.release ();
3555 }
3556
3557 /* Modify actual arguments of the call statement STMT as indicated in
3558 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3559 Otherwise it must contain the corresponding call graph edge. */
3560
3561 void
3562 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3563 ipa_parm_adjustment_vec adjustments)
3564 {
3565 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3566 vec<tree> vargs;
3567 vec<tree, va_gc> **debug_args = NULL;
3568 gimple new_stmt;
3569 gimple_stmt_iterator gsi, prev_gsi;
3570 tree callee_decl;
3571 int i, len;
3572
3573 len = adjustments.length ();
3574 vargs.create (len);
3575 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3576 ipa_remove_stmt_references (current_node, stmt);
3577
3578 gsi = gsi_for_stmt (stmt);
3579 prev_gsi = gsi;
3580 gsi_prev (&prev_gsi);
3581 for (i = 0; i < len; i++)
3582 {
3583 struct ipa_parm_adjustment *adj;
3584
3585 adj = &adjustments[i];
3586
3587 if (adj->op == IPA_PARM_OP_COPY)
3588 {
3589 tree arg = gimple_call_arg (stmt, adj->base_index);
3590
3591 vargs.quick_push (arg);
3592 }
3593 else if (adj->op != IPA_PARM_OP_REMOVE)
3594 {
3595 tree expr, base, off;
3596 location_t loc;
3597 unsigned int deref_align = 0;
3598 bool deref_base = false;
3599
3600 /* We create a new parameter out of the value of the old one; we can
3601 do the following kinds of transformations:
3602
3603 - A scalar passed by reference is converted to a scalar passed by
3604 value. (adj->by_ref is false and the type of the original
3605 actual argument is a pointer to a scalar).
3606
3607 - A part of an aggregate is passed instead of the whole aggregate.
3608 The part can be passed either by value or by reference; this is
3609 determined by the value of adj->by_ref.  Moreover, the code below
3610 handles both the situation when the original aggregate is passed by
3611 value (its type is not a pointer) and the one when it is passed by
3612 reference (it is a pointer to an aggregate).
3613
3614 When the new argument is passed by reference (adj->by_ref is true)
3615 it must be a part of an aggregate and therefore we form it by
3616 simply taking the address of a reference inside the original
3617 aggregate. */
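	     /* For illustration (hypothetical caller and callee): if the
		callee of foo (&s) only ever reads s.f, the call can be
		rewritten to pass s.f by value, forming the new argument as
		a MEM_REF of &s at the offset of f; conversely, a part
		passed by reference is formed by taking the address of such
		a MEM_REF.  */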
3618
3619 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3620 base = gimple_call_arg (stmt, adj->base_index);
3621 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3622 : EXPR_LOCATION (base);
3623
3624 if (TREE_CODE (base) != ADDR_EXPR
3625 && POINTER_TYPE_P (TREE_TYPE (base)))
3626 off = build_int_cst (adj->alias_ptr_type,
3627 adj->offset / BITS_PER_UNIT);
3628 else
3629 {
3630 HOST_WIDE_INT base_offset;
3631 tree prev_base;
3632 bool addrof;
3633
3634 if (TREE_CODE (base) == ADDR_EXPR)
3635 {
3636 base = TREE_OPERAND (base, 0);
3637 addrof = true;
3638 }
3639 else
3640 addrof = false;
3641 prev_base = base;
3642 base = get_addr_base_and_unit_offset (base, &base_offset);
3643 /* Aggregate arguments can have non-invariant addresses. */
3644 if (!base)
3645 {
3646 base = build_fold_addr_expr (prev_base);
3647 off = build_int_cst (adj->alias_ptr_type,
3648 adj->offset / BITS_PER_UNIT);
3649 }
3650 else if (TREE_CODE (base) == MEM_REF)
3651 {
3652 if (!addrof)
3653 {
3654 deref_base = true;
3655 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3656 }
3657 off = build_int_cst (adj->alias_ptr_type,
3658 base_offset
3659 + adj->offset / BITS_PER_UNIT);
3660 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3661 off);
3662 base = TREE_OPERAND (base, 0);
3663 }
3664 else
3665 {
3666 off = build_int_cst (adj->alias_ptr_type,
3667 base_offset
3668 + adj->offset / BITS_PER_UNIT);
3669 base = build_fold_addr_expr (base);
3670 }
3671 }
3672
3673 if (!adj->by_ref)
3674 {
3675 tree type = adj->type;
3676 unsigned int align;
3677 unsigned HOST_WIDE_INT misalign;
3678
3679 if (deref_base)
3680 {
3681 align = deref_align;
3682 misalign = 0;
3683 }
3684 else
3685 {
3686 get_pointer_alignment_1 (base, &align, &misalign);
3687 if (TYPE_ALIGN (type) > align)
3688 align = TYPE_ALIGN (type);
3689 }
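	      /* Fold the constant offset OFF into the misalignment and keep
		 only the least significant set bit of the result, i.e. the
		 largest power of two guaranteed to divide the final address;
		 weaken the type of the MEM_REF accordingly.  */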
3690 misalign += (tree_to_double_int (off)
3691 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3692 * BITS_PER_UNIT);
3693 misalign = misalign & (align - 1);
3694 if (misalign != 0)
3695 align = (misalign & -misalign);
3696 if (align < TYPE_ALIGN (type))
3697 type = build_aligned_type (type, align);
3698 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3699 }
3700 else
3701 {
3702 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3703 expr = build_fold_addr_expr (expr);
3704 }
3705
3706 expr = force_gimple_operand_gsi (&gsi, expr,
3707 adj->by_ref
3708 || is_gimple_reg_type (adj->type),
3709 NULL, true, GSI_SAME_STMT);
3710 vargs.quick_push (expr);
3711 }
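      /* For arguments that are not plain copies, emit a debug bind so that
	 the value of the removed or transformed parameter stays available
	 to the debugger through a DEBUG_EXPR_DECL attached to the callee.  */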
3712 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
3713 {
3714 unsigned int ix;
3715 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3716 gimple def_temp;
3717
3718 arg = gimple_call_arg (stmt, adj->base_index);
3719 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3720 {
3721 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3722 continue;
3723 arg = fold_convert_loc (gimple_location (stmt),
3724 TREE_TYPE (origin), arg);
3725 }
3726 if (debug_args == NULL)
3727 debug_args = decl_debug_args_insert (callee_decl);
3728 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3729 if (ddecl == origin)
3730 {
3731 ddecl = (**debug_args)[ix + 1];
3732 break;
3733 }
3734 if (ddecl == NULL)
3735 {
3736 ddecl = make_node (DEBUG_EXPR_DECL);
3737 DECL_ARTIFICIAL (ddecl) = 1;
3738 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3739 DECL_MODE (ddecl) = DECL_MODE (origin);
3740
3741 vec_safe_push (*debug_args, origin);
3742 vec_safe_push (*debug_args, ddecl);
3743 }
3744 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3745 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3746 }
3747 }
3748
3749 if (dump_file && (dump_flags & TDF_DETAILS))
3750 {
3751 fprintf (dump_file, "replacing stmt:");
3752 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3753 }
3754
3755 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3756 vargs.release ();
3757 if (gimple_call_lhs (stmt))
3758 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3759
3760 gimple_set_block (new_stmt, gimple_block (stmt));
3761 if (gimple_has_location (stmt))
3762 gimple_set_location (new_stmt, gimple_location (stmt));
3763 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3764 gimple_call_copy_flags (new_stmt, stmt);
3765
3766 if (dump_file && (dump_flags & TDF_DETAILS))
3767 {
3768 fprintf (dump_file, "with stmt:");
3769 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3770 fprintf (dump_file, "\n");
3771 }
3772 gsi_replace (&gsi, new_stmt, true);
3773 if (cs)
3774 cgraph_set_call_stmt (cs, new_stmt);
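  /* Walk back towards PREV_GSI and record references also in the
     statements that force_gimple_operand_gsi inserted before the call.  */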
3775 do
3776 {
3777 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3778 gsi_prev (&gsi);
3779 }
3780 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3781 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3782
3783 update_ssa (TODO_update_ssa);
3784 free_dominance_info (CDI_DOMINATORS);
3785 }
3786
3787 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3788 so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
3789 specifies whether the function should care about type incompatibility
3790 between the current and new expressions.  If it is false, the function
3791 will leave incompatibility issues to the caller.  Return true iff the
3792 expression was modified.  */
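/* A hypothetical example: if parameter P of type struct S * has been
   reduced to a scalar copy of P->f, an occurrence of P->f in the body
   matches the adjustment and is replaced by the new decl, wrapped in a
   VIEW_CONVERT_EXPR when CONVERT is set and the types differ.  */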
3793
3794 bool
3795 ipa_modify_expr (tree *expr, bool convert,
3796 ipa_parm_adjustment_vec adjustments)
3797 {
3798 struct ipa_parm_adjustment *cand
3799 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
3800 if (!cand)
3801 return false;
3802
3803 tree src;
3804 if (cand->by_ref)
3805 src = build_simple_mem_ref (cand->new_decl);
3806 else
3807 src = cand->new_decl;
3808
3809 if (dump_file && (dump_flags & TDF_DETAILS))
3810 {
3811 fprintf (dump_file, "About to replace expr ");
3812 print_generic_expr (dump_file, *expr, 0);
3813 fprintf (dump_file, " with ");
3814 print_generic_expr (dump_file, src, 0);
3815 fprintf (dump_file, "\n");
3816 }
3817
3818 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3819 {
3820 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3821 *expr = vce;
3822 }
3823 else
3824 *expr = src;
3825 return true;
3826 }
3827
3828 /* If T is an SSA_NAME, return NULL if it is not a default def or
3829 return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
3830 the base variable is always returned, regardless of whether it is a
3831 default def.  Return T if it is not an SSA_NAME.  */
3832
3833 static tree
3834 get_ssa_base_param (tree t, bool ignore_default_def)
3835 {
3836 if (TREE_CODE (t) == SSA_NAME)
3837 {
3838 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
3839 return SSA_NAME_VAR (t);
3840 else
3841 return NULL_TREE;
3842 }
3843 return t;
3844 }
3845
3846 /* Given an expression, return an adjustment entry specifying the
3847 transformation to be done on EXPR. If no suitable adjustment entry
3848 was found, returns NULL.
3849
3850 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3851 default def, otherwise bail on them.
3852
3853 If CONVERT is non-NULL, this function will set *CONVERT if the
3854 expression provided is a component reference. ADJUSTMENTS is the
3855 adjustments vector. */
3856
3857 ipa_parm_adjustment *
3858 ipa_get_adjustment_candidate (tree **expr, bool *convert,
3859 ipa_parm_adjustment_vec adjustments,
3860 bool ignore_default_def)
3861 {
3862 if (TREE_CODE (**expr) == BIT_FIELD_REF
3863 || TREE_CODE (**expr) == IMAGPART_EXPR
3864 || TREE_CODE (**expr) == REALPART_EXPR)
3865 {
3866 *expr = &TREE_OPERAND (**expr, 0);
3867 if (convert)
3868 *convert = true;
3869 }
3870
3871 HOST_WIDE_INT offset, size, max_size;
3872 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
3873 if (!base || size == -1 || max_size == -1)
3874 return NULL;
3875
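  /* Fold a constant MEM_REF offset into the bit offset so that the
     comparison with the recorded adjustment offsets below is relative to
     the start of the parameter itself.  */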
3876 if (TREE_CODE (base) == MEM_REF)
3877 {
3878 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
3879 base = TREE_OPERAND (base, 0);
3880 }
3881
3882 base = get_ssa_base_param (base, ignore_default_def);
3883 if (!base || TREE_CODE (base) != PARM_DECL)
3884 return NULL;
3885
3886 struct ipa_parm_adjustment *cand = NULL;
3887 unsigned int len = adjustments.length ();
3888 for (unsigned i = 0; i < len; i++)
3889 {
3890 struct ipa_parm_adjustment *adj = &adjustments[i];
3891
3892 if (adj->base == base
3893 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
3894 {
3895 cand = adj;
3896 break;
3897 }
3898 }
3899
3900 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
3901 return NULL;
3902 return cand;
3903 }
3904
3905 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3906
3907 static bool
3908 index_in_adjustments_multiple_times_p (int base_index,
3909 ipa_parm_adjustment_vec adjustments)
3910 {
3911 int i, len = adjustments.length ();
3912 bool one = false;
3913
3914 for (i = 0; i < len; i++)
3915 {
3916 struct ipa_parm_adjustment *adj;
3917 adj = &adjustments[i];
3918
3919 if (adj->base_index == base_index)
3920 {
3921 if (one)
3922 return true;
3923 else
3924 one = true;
3925 }
3926 }
3927 return false;
3928 }
3929
3930
3931 /* Return adjustments that should have the same effect on function parameters
3932 and call arguments as if they were first changed according to adjustments in
3933 INNER and then by adjustments in OUTER. */
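/* Note that removals recorded in INNER are re-appended at the end of
   the combined vector (see the final loop below) so that parameters
   already removed by INNER stay removed in the result.  */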
3934
3935 ipa_parm_adjustment_vec
3936 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3937 ipa_parm_adjustment_vec outer)
3938 {
3939 int i, outlen = outer.length ();
3940 int inlen = inner.length ();
3941 int removals = 0;
3942 ipa_parm_adjustment_vec adjustments, tmp;
3943
3944 tmp.create (inlen);
3945 for (i = 0; i < inlen; i++)
3946 {
3947 struct ipa_parm_adjustment *n;
3948 n = &inner[i];
3949
3950 if (n->op == IPA_PARM_OP_REMOVE)
3951 removals++;
3952 else
3953 {
3954 /* FIXME: Handling of new arguments is not implemented yet.  */
3955 gcc_assert (n->op != IPA_PARM_OP_NEW);
3956 tmp.quick_push (*n);
3957 }
3958 }
3959
3960 adjustments.create (outlen + removals);
3961 for (i = 0; i < outlen; i++)
3962 {
3963 struct ipa_parm_adjustment r;
3964 struct ipa_parm_adjustment *out = &outer[i];
3965 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3966
3967 memset (&r, 0, sizeof (r));
3968 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
3969 if (out->op == IPA_PARM_OP_REMOVE)
3970 {
3971 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3972 {
3973 r.op = IPA_PARM_OP_REMOVE;
3974 adjustments.quick_push (r);
3975 }
3976 continue;
3977 }
3978 else
3979 {
3980 /* FIXME: Handling of new arguments is not implemented yet.  */
3981 gcc_assert (out->op != IPA_PARM_OP_NEW);
3982 }
3983
3984 r.base_index = in->base_index;
3985 r.type = out->type;
3986
3987 /* FIXME: Create nonlocal value too. */
3988
3989 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
3990 r.op = IPA_PARM_OP_COPY;
3991 else if (in->op == IPA_PARM_OP_COPY)
3992 r.offset = out->offset;
3993 else if (out->op == IPA_PARM_OP_COPY)
3994 r.offset = in->offset;
3995 else
3996 r.offset = in->offset + out->offset;
3997 adjustments.quick_push (r);
3998 }
3999
4000 for (i = 0; i < inlen; i++)
4001 {
4002 struct ipa_parm_adjustment *n = &inner[i];
4003
4004 if (n->op == IPA_PARM_OP_REMOVE)
4005 adjustments.quick_push (*n);
4006 }
4007
4008 tmp.release ();
4009 return adjustments;
4010 }
4011
4012 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4013 human-friendly way, assuming they are meant to be applied to FNDECL.  */
4014
4015 void
4016 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4017 tree fndecl)
4018 {
4019 int i, len = adjustments.length ();
4020 bool first = true;
4021 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4022
4023 fprintf (file, "IPA param adjustments: ");
4024 for (i = 0; i < len; i++)
4025 {
4026 struct ipa_parm_adjustment *adj;
4027 adj = &adjustments[i];
4028
4029 if (!first)
4030 fprintf (file, " ");
4031 else
4032 first = false;
4033
4034 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4035 print_generic_expr (file, parms[adj->base_index], 0);
4036 if (adj->base)
4037 {
4038 fprintf (file, ", base: ");
4039 print_generic_expr (file, adj->base, 0);
4040 }
4041 if (adj->new_decl)
4042 {
4043 fprintf (file, ", new_decl: ");
4044 print_generic_expr (file, adj->new_decl, 0);
4045 }
4046 if (adj->new_ssa_base)
4047 {
4048 fprintf (file, ", new_ssa_base: ");
4049 print_generic_expr (file, adj->new_ssa_base, 0);
4050 }
4051
4052 if (adj->op == IPA_PARM_OP_COPY)
4053 fprintf (file, ", copy_param");
4054 else if (adj->op == IPA_PARM_OP_REMOVE)
4055 fprintf (file, ", remove_param");
4056 else
4057 fprintf (file, ", offset %li", (long) adj->offset);
4058 if (adj->by_ref)
4059 fprintf (file, ", by_ref");
4060 print_node_brief (file, ", type: ", adj->type, 0);
4061 fprintf (file, "\n");
4062 }
4063 parms.release ();
4064 }
4065
4066 /* Dump the linked list of aggregate replacement values AV to F.  */
4067
4068 void
4069 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4070 {
4071 bool comma = false;
4072 fprintf (f, " Aggregate replacements:");
4073 for (; av; av = av->next)
4074 {
4075 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4076 av->index, av->offset);
4077 print_generic_expr (f, av->value, 0);
4078 comma = true;
4079 }
4080 fprintf (f, "\n");
4081 }
4082
4083 /* Stream out jump function JUMP_FUNC to OB. */
4084
4085 static void
4086 ipa_write_jump_function (struct output_block *ob,
4087 struct ipa_jump_func *jump_func)
4088 {
4089 struct ipa_agg_jf_item *item;
4090 struct bitpack_d bp;
4091 int i, count;
4092
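  /* The jump function kind is streamed first so that
     ipa_read_jump_function knows which union members and bit-packed
     flags follow.  */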
4093 streamer_write_uhwi (ob, jump_func->type);
4094 switch (jump_func->type)
4095 {
4096 case IPA_JF_UNKNOWN:
4097 break;
4098 case IPA_JF_KNOWN_TYPE:
4099 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4100 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4101 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4102 break;
4103 case IPA_JF_CONST:
4104 gcc_assert (
4105 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4106 stream_write_tree (ob, jump_func->value.constant.value, true);
4107 break;
4108 case IPA_JF_PASS_THROUGH:
4109 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4110 if (jump_func->value.pass_through.operation == NOP_EXPR)
4111 {
4112 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4113 bp = bitpack_create (ob->main_stream);
4114 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4115 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4116 streamer_write_bitpack (&bp);
4117 }
4118 else
4119 {
4120 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4121 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4122 }
4123 break;
4124 case IPA_JF_ANCESTOR:
4125 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4126 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4127 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4128 bp = bitpack_create (ob->main_stream);
4129 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4130 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4131 streamer_write_bitpack (&bp);
4132 break;
4133 }
4134
4135 count = vec_safe_length (jump_func->agg.items);
4136 streamer_write_uhwi (ob, count);
4137 if (count)
4138 {
4139 bp = bitpack_create (ob->main_stream);
4140 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4141 streamer_write_bitpack (&bp);
4142 }
4143
4144 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4145 {
4146 streamer_write_uhwi (ob, item->offset);
4147 stream_write_tree (ob, item->value, true);
4148 }
4149 }
4150
4151 /* Read in jump function JUMP_FUNC from IB. */
4152
4153 static void
4154 ipa_read_jump_function (struct lto_input_block *ib,
4155 struct ipa_jump_func *jump_func,
4156 struct cgraph_edge *cs,
4157 struct data_in *data_in)
4158 {
4159 enum jump_func_type jftype;
4160 enum tree_code operation;
4161 int i, count;
4162
4163 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4164 switch (jftype)
4165 {
4166 case IPA_JF_UNKNOWN:
4167 jump_func->type = IPA_JF_UNKNOWN;
4168 break;
4169 case IPA_JF_KNOWN_TYPE:
4170 {
4171 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4172 tree base_type = stream_read_tree (ib, data_in);
4173 tree component_type = stream_read_tree (ib, data_in);
4174
4175 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4176 break;
4177 }
4178 case IPA_JF_CONST:
4179 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4180 break;
4181 case IPA_JF_PASS_THROUGH:
4182 operation = (enum tree_code) streamer_read_uhwi (ib);
4183 if (operation == NOP_EXPR)
4184 {
4185 int formal_id = streamer_read_uhwi (ib);
4186 struct bitpack_d bp = streamer_read_bitpack (ib);
4187 bool agg_preserved = bp_unpack_value (&bp, 1);
4188 bool type_preserved = bp_unpack_value (&bp, 1);
4189 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4190 type_preserved);
4191 }
4192 else
4193 {
4194 tree operand = stream_read_tree (ib, data_in);
4195 int formal_id = streamer_read_uhwi (ib);
4196 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4197 operation);
4198 }
4199 break;
4200 case IPA_JF_ANCESTOR:
4201 {
4202 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4203 tree type = stream_read_tree (ib, data_in);
4204 int formal_id = streamer_read_uhwi (ib);
4205 struct bitpack_d bp = streamer_read_bitpack (ib);
4206 bool agg_preserved = bp_unpack_value (&bp, 1);
4207 bool type_preserved = bp_unpack_value (&bp, 1);
4208
4209 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4210 type_preserved);
4211 break;
4212 }
4213 }
4214
4215 count = streamer_read_uhwi (ib);
4216 vec_alloc (jump_func->agg.items, count);
4217 if (count)
4218 {
4219 struct bitpack_d bp = streamer_read_bitpack (ib);
4220 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4221 }
4222 for (i = 0; i < count; i++)
4223 {
4224 struct ipa_agg_jf_item item;
4225 item.offset = streamer_read_uhwi (ib);
4226 item.value = stream_read_tree (ib, data_in);
4227 jump_func->agg.items->quick_push (item);
4228 }
4229 }
4230
4231 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4232 relevant to indirect inlining to OB. */
4233
4234 static void
4235 ipa_write_indirect_edge_info (struct output_block *ob,
4236 struct cgraph_edge *cs)
4237 {
4238 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4239 struct bitpack_d bp;
4240
4241 streamer_write_hwi (ob, ii->param_index);
4242 streamer_write_hwi (ob, ii->offset);
4243 bp = bitpack_create (ob->main_stream);
4244 bp_pack_value (&bp, ii->polymorphic, 1);
4245 bp_pack_value (&bp, ii->agg_contents, 1);
4246 bp_pack_value (&bp, ii->member_ptr, 1);
4247 bp_pack_value (&bp, ii->by_ref, 1);
4248 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4249 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4250 streamer_write_bitpack (&bp);
4251
4252 if (ii->polymorphic)
4253 {
4254 streamer_write_hwi (ob, ii->otr_token);
4255 stream_write_tree (ob, ii->otr_type, true);
4256 stream_write_tree (ob, ii->outer_type, true);
4257 }
4258 }
4259
4260 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4261 relevant to indirect inlining from IB. */
4262
4263 static void
4264 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4265 struct data_in *data_in,
4266 struct cgraph_edge *cs)
4267 {
4268 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4269 struct bitpack_d bp;
4270
4271 ii->param_index = (int) streamer_read_hwi (ib);
4272 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4273 bp = streamer_read_bitpack (ib);
4274 ii->polymorphic = bp_unpack_value (&bp, 1);
4275 ii->agg_contents = bp_unpack_value (&bp, 1);
4276 ii->member_ptr = bp_unpack_value (&bp, 1);
4277 ii->by_ref = bp_unpack_value (&bp, 1);
4278 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4279 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4280 if (ii->polymorphic)
4281 {
4282 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4283 ii->otr_type = stream_read_tree (ib, data_in);
4284 ii->outer_type = stream_read_tree (ib, data_in);
4285 }
4286 }
4287
4288 /* Stream out NODE info to OB. */
4289
4290 static void
4291 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4292 {
4293 int node_ref;
4294 lto_symtab_encoder_t encoder;
4295 struct ipa_node_params *info = IPA_NODE_REF (node);
4296 int j;
4297 struct cgraph_edge *e;
4298 struct bitpack_d bp;
4299
4300 encoder = ob->decl_state->symtab_node_encoder;
4301 node_ref = lto_symtab_encoder_encode (encoder, node);
4302 streamer_write_uhwi (ob, node_ref);
4303
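  /* The per-node record layout is: parameter count, per-parameter move
     costs, a bitpack of the used flags, per-parameter controlled-use
     counts, and finally the jump functions of all outgoing edges;
     ipa_read_node_info must consume these in the same order.  */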
4304 streamer_write_uhwi (ob, ipa_get_param_count (info));
4305 for (j = 0; j < ipa_get_param_count (info); j++)
4306 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4307 bp = bitpack_create (ob->main_stream);
4308 gcc_assert (info->uses_analysis_done
4309 || ipa_get_param_count (info) == 0);
4310 gcc_assert (!info->node_enqueued);
4311 gcc_assert (!info->ipcp_orig_node);
4312 for (j = 0; j < ipa_get_param_count (info); j++)
4313 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4314 streamer_write_bitpack (&bp);
4315 for (j = 0; j < ipa_get_param_count (info); j++)
4316 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4317 for (e = node->callees; e; e = e->next_callee)
4318 {
4319 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4320
4321 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4322 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4323 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4324 }
4325 for (e = node->indirect_calls; e; e = e->next_callee)
4326 {
4327 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4328
4329 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4330 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4331 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4332 ipa_write_indirect_edge_info (ob, e);
4333 }
4334 }
4335
4336 /* Stream in NODE info from IB. */
4337
4338 static void
4339 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4340 struct data_in *data_in)
4341 {
4342 struct ipa_node_params *info = IPA_NODE_REF (node);
4343 int k;
4344 struct cgraph_edge *e;
4345 struct bitpack_d bp;
4346
4347 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4348
4349 for (k = 0; k < ipa_get_param_count (info); k++)
4350 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4351
4352 bp = streamer_read_bitpack (ib);
4353 if (ipa_get_param_count (info) != 0)
4354 info->uses_analysis_done = true;
4355 info->node_enqueued = false;
4356 for (k = 0; k < ipa_get_param_count (info); k++)
4357 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4358 for (k = 0; k < ipa_get_param_count (info); k++)
4359 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4360 for (e = node->callees; e; e = e->next_callee)
4361 {
4362 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4363 int count = streamer_read_uhwi (ib);
4364
4365 if (!count)
4366 continue;
4367 vec_safe_grow_cleared (args->jump_functions, count);
4368
4369 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4370 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4371 data_in);
4372 }
4373 for (e = node->indirect_calls; e; e = e->next_callee)
4374 {
4375 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4376 int count = streamer_read_uhwi (ib);
4377
4378 if (count)
4379 {
4380 vec_safe_grow_cleared (args->jump_functions, count);
4381 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4382 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4383 data_in);
4384 }
4385 ipa_read_indirect_edge_info (ib, data_in, e);
4386 }
4387 }
4388
4389 /* Write jump functions for all analyzed nodes in the current partition.  */
4390
4391 void
4392 ipa_prop_write_jump_functions (void)
4393 {
4394 struct cgraph_node *node;
4395 struct output_block *ob;
4396 unsigned int count = 0;
4397 lto_symtab_encoder_iterator lsei;
4398 lto_symtab_encoder_t encoder;
4399
4400
4401 if (!ipa_node_params_vector.exists ())
4402 return;
4403
4404 ob = create_output_block (LTO_section_jump_functions);
4405 encoder = ob->decl_state->symtab_node_encoder;
4406 ob->cgraph_node = NULL;
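  /* Count the functions with IPA information in this partition first;
     the count prefixes the per-node records in the section.  */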
4407 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4408 lsei_next_function_in_partition (&lsei))
4409 {
4410 node = lsei_cgraph_node (lsei);
4411 if (cgraph_function_with_gimple_body_p (node)
4412 && IPA_NODE_REF (node) != NULL)
4413 count++;
4414 }
4415
4416 streamer_write_uhwi (ob, count);
4417
4418 /* Process all of the functions. */
4419 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4420 lsei_next_function_in_partition (&lsei))
4421 {
4422 node = lsei_cgraph_node (lsei);
4423 if (cgraph_function_with_gimple_body_p (node)
4424 && IPA_NODE_REF (node) != NULL)
4425 ipa_write_node_info (ob, node);
4426 }
4427 streamer_write_char_stream (ob->main_stream, 0);
4428 produce_asm (ob, NULL);
4429 destroy_output_block (ob);
4430 }
4431
4432 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA.  */
4433
4434 static void
4435 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4436 size_t len)
4437 {
4438 const struct lto_function_header *header =
4439 (const struct lto_function_header *) data;
4440 const int cfg_offset = sizeof (struct lto_function_header);
4441 const int main_offset = cfg_offset + header->cfg_size;
4442 const int string_offset = main_offset + header->main_size;
4443 struct data_in *data_in;
4444 struct lto_input_block ib_main;
4445 unsigned int i;
4446 unsigned int count;
4447
4448 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4449 header->main_size);
4450
4451 data_in =
4452 lto_data_in_create (file_data, (const char *) data + string_offset,
4453 header->string_size, vNULL);
4454 count = streamer_read_uhwi (&ib_main);
4455
4456 for (i = 0; i < count; i++)
4457 {
4458 unsigned int index;
4459 struct cgraph_node *node;
4460 lto_symtab_encoder_t encoder;
4461
4462 index = streamer_read_uhwi (&ib_main);
4463 encoder = file_data->symtab_node_encoder;
4464 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4465 gcc_assert (node->definition);
4466 ipa_read_node_info (&ib_main, node, data_in);
4467 }
4468 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4469 len);
4470 lto_data_in_delete (data_in);
4471 }
4472
4473 /* Read IPA-CP jump functions.  */
4474
4475 void
4476 ipa_prop_read_jump_functions (void)
4477 {
4478 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4479 struct lto_file_decl_data *file_data;
4480 unsigned int j = 0;
4481
4482 ipa_check_create_node_params ();
4483 ipa_check_create_edge_args ();
4484 ipa_register_cgraph_hooks ();
4485
4486 while ((file_data = file_data_vec[j++]))
4487 {
4488 size_t len;
4489 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4490
4491 if (data)
4492 ipa_prop_read_section (file_data, data, len);
4493 }
4494 }
4495
4496 /* After merging units, we can get a mismatch in argument counts.
4497 Declaration merging might also have rendered parameter lists obsolete,
4498 so re-create the parameter and edge argument structures.  */
4499
4500 void
4501 ipa_update_after_lto_read (void)
4502 {
4503 ipa_check_create_node_params ();
4504 ipa_check_create_edge_args ();
4505 }
4506
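/* Stream out the aggregate value replacement chain for NODE to OB.  */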
4507 void
4508 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4509 {
4510 int node_ref;
4511 unsigned int count = 0;
4512 lto_symtab_encoder_t encoder;
4513 struct ipa_agg_replacement_value *aggvals, *av;
4514
4515 aggvals = ipa_get_agg_replacements_for_node (node);
4516 encoder = ob->decl_state->symtab_node_encoder;
4517 node_ref = lto_symtab_encoder_encode (encoder, node);
4518 streamer_write_uhwi (ob, node_ref);
4519
4520 for (av = aggvals; av; av = av->next)
4521 count++;
4522 streamer_write_uhwi (ob, count);
4523
4524 for (av = aggvals; av; av = av->next)
4525 {
4526 struct bitpack_d bp;
4527
4528 streamer_write_uhwi (ob, av->offset);
4529 streamer_write_uhwi (ob, av->index);
4530 stream_write_tree (ob, av->value, true);
4531
4532 bp = bitpack_create (ob->main_stream);
4533 bp_pack_value (&bp, av->by_ref, 1);
4534 streamer_write_bitpack (&bp);
4535 }
4536 }
4537
4538 /* Stream in the aggregate value replacement chain for NODE from IB. */
4539
4540 static void
4541 read_agg_replacement_chain (struct lto_input_block *ib,
4542 struct cgraph_node *node,
4543 struct data_in *data_in)
4544 {
4545 struct ipa_agg_replacement_value *aggvals = NULL;
4546 unsigned int count, i;
4547
4548 count = streamer_read_uhwi (ib);
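  /* The chain is rebuilt in reverse order relative to the stream; the
     lookups in ipcp_transform_function do not depend on the order.  */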
4549 for (i = 0; i < count; i++)
4550 {
4551 struct ipa_agg_replacement_value *av;
4552 struct bitpack_d bp;
4553
4554 av = ggc_alloc_ipa_agg_replacement_value ();
4555 av->offset = streamer_read_uhwi (ib);
4556 av->index = streamer_read_uhwi (ib);
4557 av->value = stream_read_tree (ib, data_in);
4558 bp = streamer_read_bitpack (ib);
4559 av->by_ref = bp_unpack_value (&bp, 1);
4560 av->next = aggvals;
4561 aggvals = av;
4562 }
4563 ipa_set_node_agg_value_chain (node, aggvals);
4564 }
4565
4566 /* Write all aggregate replacements for nodes in the current partition.  */
4567
4568 void
4569 ipa_prop_write_all_agg_replacement (void)
4570 {
4571 struct cgraph_node *node;
4572 struct output_block *ob;
4573 unsigned int count = 0;
4574 lto_symtab_encoder_iterator lsei;
4575 lto_symtab_encoder_t encoder;
4576
4577 if (!ipa_node_agg_replacements)
4578 return;
4579
4580 ob = create_output_block (LTO_section_ipcp_transform);
4581 encoder = ob->decl_state->symtab_node_encoder;
4582 ob->cgraph_node = NULL;
4583 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4584 lsei_next_function_in_partition (&lsei))
4585 {
4586 node = lsei_cgraph_node (lsei);
4587 if (cgraph_function_with_gimple_body_p (node)
4588 && ipa_get_agg_replacements_for_node (node) != NULL)
4589 count++;
4590 }
4591
4592 streamer_write_uhwi (ob, count);
4593
4594 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4595 lsei_next_function_in_partition (&lsei))
4596 {
4597 node = lsei_cgraph_node (lsei);
4598 if (cgraph_function_with_gimple_body_p (node)
4599 && ipa_get_agg_replacements_for_node (node) != NULL)
4600 write_agg_replacement_chain (ob, node);
4601 }
4602 streamer_write_char_stream (ob->main_stream, 0);
4603 produce_asm (ob, NULL);
4604 destroy_output_block (ob);
4605 }
4606
4607 /* Read replacements section in file FILE_DATA of length LEN with data
4608 DATA. */
4609
4610 static void
4611 read_replacements_section (struct lto_file_decl_data *file_data,
4612 const char *data,
4613 size_t len)
4614 {
4615 const struct lto_function_header *header =
4616 (const struct lto_function_header *) data;
4617 const int cfg_offset = sizeof (struct lto_function_header);
4618 const int main_offset = cfg_offset + header->cfg_size;
4619 const int string_offset = main_offset + header->main_size;
4620 struct data_in *data_in;
4621 struct lto_input_block ib_main;
4622 unsigned int i;
4623 unsigned int count;
4624
4625 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4626 header->main_size);
4627
4628 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4629 header->string_size, vNULL);
4630 count = streamer_read_uhwi (&ib_main);
4631
4632 for (i = 0; i < count; i++)
4633 {
4634 unsigned int index;
4635 struct cgraph_node *node;
4636 lto_symtab_encoder_t encoder;
4637
4638 index = streamer_read_uhwi (&ib_main);
4639 encoder = file_data->symtab_node_encoder;
4640 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4641 gcc_assert (node->definition);
4642 read_agg_replacement_chain (&ib_main, node, data_in);
4643 }
4644 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4645 len);
4646 lto_data_in_delete (data_in);
4647 }
4648
4649 /* Read IPA-CP aggregate replacements. */
4650
4651 void
4652 ipa_prop_read_all_agg_replacement (void)
4653 {
4654 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4655 struct lto_file_decl_data *file_data;
4656 unsigned int j = 0;
4657
4658 while ((file_data = file_data_vec[j++]))
4659 {
4660 size_t len;
4661 const char *data = lto_get_section_data (file_data,
4662 LTO_section_ipcp_transform,
4663 NULL, &len);
4664 if (data)
4665 read_replacements_section (file_data, data, len);
4666 }
4667 }
4668
4669 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4670 NODE. */
4671
4672 static void
4673 adjust_agg_replacement_values (struct cgraph_node *node,
4674 struct ipa_agg_replacement_value *aggval)
4675 {
4676 struct ipa_agg_replacement_value *v;
4677 int i, c = 0, d = 0, *adj;
4678
4679 if (!node->clone.combined_args_to_skip)
4680 return;
4681
4682 for (v = aggval; v; v = v->next)
4683 {
4684 gcc_assert (v->index >= 0);
4685 if (c < v->index)
4686 c = v->index;
4687 }
4688 c++;
4689
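  /* Build a map from pre-clone parameter indices to post-clone ones:
     skipped parameters map to -1 and the remaining ones are shifted down
     by the number of removals before them.  */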
4690 adj = XALLOCAVEC (int, c);
4691 for (i = 0; i < c; i++)
4692 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4693 {
4694 adj[i] = -1;
4695 d++;
4696 }
4697 else
4698 adj[i] = i - d;
4699
4700 for (v = aggval; v; v = v->next)
4701 v->index = adj[v->index];
4702 }
4703
4704
4705 /* Function body transformation phase of IPA-CP: apply aggregate replacements.  */
4706
4707 unsigned int
4708 ipcp_transform_function (struct cgraph_node *node)
4709 {
4710 vec<ipa_param_descriptor> descriptors = vNULL;
4711 struct param_analysis_info *parms_ainfo;
4712 struct ipa_agg_replacement_value *aggval;
4713 gimple_stmt_iterator gsi;
4714 basic_block bb;
4715 int param_count;
4716 bool cfg_changed = false, something_changed = false;
4717
4718 gcc_checking_assert (cfun);
4719 gcc_checking_assert (current_function_decl);
4720
4721 if (dump_file)
4722 fprintf (dump_file, "Modification phase of node %s/%i\n",
4723 node->name (), node->order);
4724
4725 aggval = ipa_get_agg_replacements_for_node (node);
4726 if (!aggval)
4727 return 0;
4728 param_count = count_formal_params (node->decl);
4729 if (param_count == 0)
4730 return 0;
4731 adjust_agg_replacement_values (node, aggval);
4732 if (dump_file)
4733 ipa_dump_agg_replacement_values (dump_file, aggval);
4734 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4735 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
4736 descriptors.safe_grow_cleared (param_count);
4737 ipa_populate_param_decls (node, descriptors);
4738
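  /* Scan the whole body for loads from parameter aggregates whose
     contents are known and replace the matching loads with the recorded
     constants.  */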
4739 FOR_EACH_BB_FN (bb, cfun)
4740 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4741 {
4742 struct ipa_agg_replacement_value *v;
4743 gimple stmt = gsi_stmt (gsi);
4744 tree rhs, val, t;
4745 HOST_WIDE_INT offset, size;
4746 int index;
4747 bool by_ref, vce;
4748
4749 if (!gimple_assign_load_p (stmt))
4750 continue;
4751 rhs = gimple_assign_rhs1 (stmt);
4752 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4753 continue;
4754
4755 vce = false;
4756 t = rhs;
4757 while (handled_component_p (t))
4758 {
4759 /* V_C_E can do things like convert an array of integers to one
4760 bigger integer and similar things we do not handle below. */
4761 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4762 {
4763 vce = true;
4764 break;
4765 }
4766 t = TREE_OPERAND (t, 0);
4767 }
4768 if (vce)
4769 continue;
4770
4771 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4772 rhs, &index, &offset, &size, &by_ref))
4773 continue;
4774 for (v = aggval; v; v = v->next)
4775 if (v->index == index
4776 && v->offset == offset)
4777 break;
4778 if (!v
4779 || v->by_ref != by_ref
4780 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4781 continue;
4782
4783 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4784 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4785 {
4786 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4787 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4788 else if (TYPE_SIZE (TREE_TYPE (rhs))
4789 == TYPE_SIZE (TREE_TYPE (v->value)))
4790 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4791 else
4792 {
4793 if (dump_file)
4794 {
4795 fprintf (dump_file, " const ");
4796 print_generic_expr (dump_file, v->value, 0);
4797 fprintf (dump_file, " can't be converted to type of ");
4798 print_generic_expr (dump_file, rhs, 0);
4799 fprintf (dump_file, "\n");
4800 }
4801 continue;
4802 }
4803 }
4804 else
4805 val = v->value;
4806
4807 if (dump_file && (dump_flags & TDF_DETAILS))
4808 {
4809 fprintf (dump_file, "Modifying stmt:\n ");
4810 print_gimple_stmt (dump_file, stmt, 0, 0);
4811 }
4812 gimple_assign_set_rhs_from_tree (&gsi, val);
4813 update_stmt (stmt);
4814
4815 if (dump_file && (dump_flags & TDF_DETAILS))
4816 {
4817 fprintf (dump_file, "into:\n ");
4818 print_gimple_stmt (dump_file, stmt, 0, 0);
4819 fprintf (dump_file, "\n");
4820 }
4821
4822 something_changed = true;
4823 if (maybe_clean_eh_stmt (stmt)
4824 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4825 cfg_changed = true;
4826 }
4827
4828 (*ipa_node_agg_replacements)[node->uid] = NULL;
4829 free_parms_ainfo (parms_ainfo, param_count);
4830 descriptors.release ();
4831
4832 if (!something_changed)
4833 return 0;
4834 else if (cfg_changed)
4835 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4836 else
4837 return TODO_update_ssa_only_virtuals;
4838 }