/* Interprocedural analyses.
   Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "langhooks.h"
#include "ggc.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Return the index of the formal parameter whose tree is PTREE among the
   parameter descriptors in the vector DESCRIPTORS, or -1 if it is not
   there.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return the index of the formal parameter whose tree is PTREE in the
   function which corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields of the parameter DESCRIPTORS that correspond
   to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->symbol.decl;
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
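
/* For illustration (a hypothetical example, not taken from the GCC sources):
   for a function declared as

     int f (int a, char *b);

   count_formal_params returns 2 and, after ipa_populate_param_decls has run,
   the PARM_DECLs of A and B sit in descriptors[0] and descriptors[1], so
   ipa_get_param_decl_index maps them to the indices 0 and 1 respectively.  */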

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      int param_count;

      param_count = count_formal_params (node->symbol.decl);
      if (param_count)
        {
          info->descriptors.safe_grow_cleared (param_count);
          ipa_populate_param_decls (node, info->descriptors);
        }
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   tree_code_name[(int)
                                  jump_func->value.pass_through.operation]);
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_low_cst (TYPE_SIZE (item->value), 1));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;
  int i;

  fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (cgraph_node_name (node)), node->uid,
               xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      if (cs->call_stmt)
        {
          fprintf (f, " indirect callsite %d for stmt ", i);
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, " indirect callsite %d :\n", i);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Worker for prune_expression_for_jf.  */

static tree
prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return the expression tree EXP unshared and with location stripped off.  */

static tree
prune_expression_for_jf (tree exp)
{
  if (EXPR_P (exp))
    {
      exp = unshare_expr (exp);
      walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
    }
  return exp;
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
{
  /* prune_expression_for_jf already unshares the expression and strips its
     locations, so no separate unsharing is needed here.  */
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant = prune_expression_for_jf (constant);
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
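
/* To make the setters above concrete, consider a hypothetical caller (this
   example is illustrative only and does not come from the GCC testsuite):

     void bar (int x, int y, struct A *a);

     void foo (int i, struct B *b)
     {
       bar (i, i + 4, &b->a_field);
     }

   The first argument of bar would be described by a simple pass-through jump
   function for parameter 0, the second by an arithmetic pass-through
   (formal_id 0, operation PLUS_EXPR, operand 4) and the third, assuming
   a_field is an ancestor sub-object, by an ancestor jump function for
   parameter 1 with the offset of a_field.  */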

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
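
/* As a hypothetical C++ illustration of the assumptions above (not taken
   from the testsuite):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); B (); };

     B::B ()
     {
       // 1) A::A() runs first and stores A's vtable pointer.
       // 2) B's vtable pointer is then stored into *this.
       // 3) Only after that may user code call virtual functions.
     }

   Walking backwards from a statement in part 3, any call encountered cannot
   be an ancestor constructor or destructor, so it can safely be skipped.  */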

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table can be identified, return that type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}
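
/* The kind of statement the function above matches looks, in a gimple dump,
   roughly like this (an illustrative sketch, not actual testsuite output):

     this_2(D)->_vptr.A = &_ZTV1B;

   i.e. a COMPONENT_REF of a DECL_VIRTUAL_P field on the left and the address
   of a virtual table VAR_DECL on the right; DECL_CONTEXT of that table then
   names the new dynamic type.  */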

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Like detect_type_change but with an extra argument COMP_TYPE which will
   become the component type part of the new JFUNC if a dynamic type change is
   detected and the new base type is identified.  */

static bool
detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
                      struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
   looking for assignments to its virtual table pointer.  If it has, return
   true and fill in the jump function JFUNC with relevant type information or
   set it to unknown.  ARG is the object itself (not a pointer to it, unless
   dereferenced).  BASE is the base of the memory access as returned by
   get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
{
  tree comp_type;

  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
    return false;

  comp_type = TREE_TYPE (TREE_TYPE (arg));
  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified, return the index of the parameter in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   the parameter.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   the offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, by_ref_p);
}
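
/* A hypothetical example of what ipa_load_from_parm_agg recognizes (not taken
   from the GCC sources): in

     struct S { int x; int y; };

     int f (struct S *p)
     {
       return p->y;
     }

   the load p->y fills in *INDEX_P with 0 (the index of P), *OFFSET_P with the
   bit offset of the field Y and *BY_REF_P with true, provided the analysis
   can show that *p is not modified before the load.  */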

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters that is used across
   the different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt)
               && !detect_type_change_ssa (tc_ssa, call, jfunc))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed only in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0
      && !detect_type_change (op1, base, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  if (!detect_type_change (obj, expr, call, jfunc, offset))
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm));
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (!TYPE_BINFO (TREE_TYPE (base))
      || detect_type_change (op, base, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
}
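
/* A minimal hypothetical example (not from the testsuite) of when a
   KNOWN_TYPE jump function is produced:

     struct A { virtual void f (); };

     void consume (A *a);

     void g ()
     {
       A a;
       consume (&a);
     }

   Because the automatic variable a cannot change its dynamic type between its
   construction and the call, the argument &a is described by a KNOWN_TYPE
   jump function with base type A and offset 0.  */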

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
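
/* The layout type_like_member_ptr_p looks for corresponds to the common
   Itanium C++ ABI representation of a pointer to member function, roughly
   (an illustrative sketch, not a definition from the sources):

     struct ptrmemfunc
     {
       void (A::*__pfn) ();   // pointer to the target function (or a vtable
                              // index, with the low bit used as a flag)
       ptrdiff_t __delta;     // adjustment to add to the object pointer
     };

   i.e. a RECORD_TYPE with exactly two fields, a method pointer followed by an
   integral delta.  */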

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents of this part are unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_low_cst (type_size, 1);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              item.value = prune_expression_for_jf (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}
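
/* A hypothetical example (not from the testsuite) of what the function above
   discovers:

     struct S { int a; int b; };

     void g ()
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       consume_by_value (s);   // consume_by_value is a made-up callee
     }

   Walking back from the call finds the two stores, so the jump function of
   the argument gets agg.by_ref = false and two aggregate items, value 1 at
   the bit offset of a and value 2 at the bit offset of b.  */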

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0
                  && !detect_type_change_ssa (arg, call, jfunc))
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                        call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
                                                  call, stmt, arg);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
                                                    call, stmt);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || (POINTER_TYPE_P (TREE_TYPE (arg)))))
        determine_known_aggregate_parts (call, arg, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from NODE.  Also count the actual arguments in the process.  */

static void
ipa_compute_jump_functions (struct cgraph_node *node,
                            struct param_analysis_info *parms_ainfo)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
                                                                  NULL);
      /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
      if (!callee->analyzed && !flag_lto)
        continue;
      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->offset = 0;
  cs->indirect_info->polymorphic = 0;
  cs->indirect_info->agg_contents = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct *)&f];
       f$__pfn_24 = MEM[(struct *)&f + 4B];

   and a few lines below:

     <bb 5>:
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
                                struct ipa_node_params *info,
                                struct param_analysis_info *parms_ainfo,
                                gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (node, index, call);
      return;
    }

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
    }
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
                               struct ipa_node_params *info, gimple call,
                               tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, call, &jfunc))
        return;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
        return;
    }

  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
1828
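/* For example (hypothetical C++ source, assuming the usual OBJ_TYPE_REF
   lowering of virtual calls), in

     int fun (struct A *a) { return a->foo (); }

   the call target is an OBJ_TYPE_REF whose OBJ_TYPE_REF_OBJECT is the
   default-definition SSA name of parameter A.  The function above then
   records an indirect call note carrying A's parameter index, ancestor
   offset 0 and the vtable token of foo, and marks the edge polymorphic
   so that IPA-CP can try to devirtualize it later.  */
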
1829 /* Analyze whether and how a call statement CALL utilizes formal parameters
1830 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1831 containing intermediate information about each formal parameter. */
1832
1833 static void
1834 ipa_analyze_call_uses (struct cgraph_node *node,
1835 struct ipa_node_params *info,
1836 struct param_analysis_info *parms_ainfo, gimple call)
1837 {
1838 tree target = gimple_call_fn (call);
1839
1840 if (!target)
1841 return;
1842 if (TREE_CODE (target) == SSA_NAME)
1843 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1844 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1845 ipa_analyze_virtual_call_uses (node, info, call, target);
1846 }
1847
1848
1849 /* Analyze the call statement STMT with respect to formal parameters (described
1850 in INFO) of the caller given by NODE. Currently it only checks whether formal
1851 parameters are called. PARMS_AINFO is a pointer to a vector containing
1852 intermediate information about each formal parameter. */
1853
1854 static void
1855 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1856 struct param_analysis_info *parms_ainfo, gimple stmt)
1857 {
1858 if (is_gimple_call (stmt))
1859 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1860 }
1861
1862 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1863 address operands alike. If OP is a parameter declaration, mark it as used
1864 in the info structure passed in DATA. */
1865
1866 static bool
1867 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1868 tree op, void *data)
1869 {
1870 struct ipa_node_params *info = (struct ipa_node_params *) data;
1871
1872 op = get_base_address (op);
1873 if (op
1874 && TREE_CODE (op) == PARM_DECL)
1875 {
1876 int index = ipa_get_param_decl_index (info, op);
1877 gcc_assert (index >= 0);
1878 ipa_set_param_used (info, index, true);
1879 }
1880
1881 return false;
1882 }
1883
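/* For instance, for an aggregate parameter s that is not a gimple
   register, a statement such as

     tmp_1 = s.field;

   reaches this callback with OP being s.field; get_base_address strips
   the COMPONENT_REF, finds the PARM_DECL s, and the parameter is flagged
   as used in INFO.  */
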
1884 /* Scan the function body of NODE and inspect the uses of formal parameters.
1885 Store the findings in various fields of the associated ipa_node_params
1886 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1887 vector containing intermediate information about each formal parameter. */
1888
1889 static void
1890 ipa_analyze_params_uses (struct cgraph_node *node,
1891 struct param_analysis_info *parms_ainfo)
1892 {
1893 tree decl = node->symbol.decl;
1894 basic_block bb;
1895 struct function *func;
1896 gimple_stmt_iterator gsi;
1897 struct ipa_node_params *info = IPA_NODE_REF (node);
1898 int i;
1899
1900 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1901 return;
1902
1903 for (i = 0; i < ipa_get_param_count (info); i++)
1904 {
1905 tree parm = ipa_get_param (info, i);
1906 tree ddef;
1907 /* For SSA regs see if parameter is used. For non-SSA we compute
1908 the flag during modification analysis. */
1909 if (is_gimple_reg (parm)
1910 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1911 parm)) != NULL_TREE
1912 && !has_zero_uses (ddef))
1913 ipa_set_param_used (info, i, true);
1914 }
1915
1916 func = DECL_STRUCT_FUNCTION (decl);
1917 FOR_EACH_BB_FN (bb, func)
1918 {
1919 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1920 {
1921 gimple stmt = gsi_stmt (gsi);
1922
1923 if (is_gimple_debug (stmt))
1924 continue;
1925
1926 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1927 walk_stmt_load_store_addr_ops (stmt, info,
1928 visit_ref_for_mod_analysis,
1929 visit_ref_for_mod_analysis,
1930 visit_ref_for_mod_analysis);
1931 }
1932 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1933 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1934 visit_ref_for_mod_analysis,
1935 visit_ref_for_mod_analysis,
1936 visit_ref_for_mod_analysis);
1937 }
1938
1939 info->uses_analysis_done = 1;
1940 }
1941
1942 /* Free the bitmaps in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
1943
1944 static void
1945 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
1946 {
1947 int i;
1948
1949 for (i = 0; i < param_count; i++)
1950 {
1951 if (parms_ainfo[i].parm_visited_statements)
1952 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1953 if (parms_ainfo[i].pt_visited_statements)
1954 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1955 }
1956 }
1957
1958 /* Initialize the array describing properties of formal parameters
1959 of NODE, analyze their uses and compute jump functions associated
1960 with actual arguments of calls from within NODE. */
1961
1962 void
1963 ipa_analyze_node (struct cgraph_node *node)
1964 {
1965 struct ipa_node_params *info;
1966 struct param_analysis_info *parms_ainfo;
1967 int param_count;
1968
1969 ipa_check_create_node_params ();
1970 ipa_check_create_edge_args ();
1971 info = IPA_NODE_REF (node);
1972 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1973 ipa_initialize_node_params (node);
1974
1975 param_count = ipa_get_param_count (info);
1976 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1977 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1978
1979 ipa_analyze_params_uses (node, parms_ainfo);
1980 ipa_compute_jump_functions (node, parms_ainfo);
1981
1982 free_parms_ainfo (parms_ainfo, param_count);
1983 pop_cfun ();
1984 }
1985
1986
1987 /* Update the jump function DST when the call graph edge corresponding to SRC
1988 is being inlined, knowing that DST is of type ancestor and SRC of known
1989 type. */
1990
1991 static void
1992 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1993 struct ipa_jump_func *dst)
1994 {
1995 HOST_WIDE_INT combined_offset;
1996 tree combined_type;
1997
1998 combined_offset = ipa_get_jf_known_type_offset (src)
1999 + ipa_get_jf_ancestor_offset (dst);
2000 combined_type = ipa_get_jf_ancestor_type (dst);
2001
2002 ipa_set_jf_known_type (dst, combined_offset,
2003 ipa_get_jf_known_type_base_type (src),
2004 combined_type);
2005 }
2006
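/* A short worked example of the combination: if SRC says "the value is
   known to be of type D at offset 0" and DST says "take the ancestor at
   offset 32, of type B", the result is a known-type jump function with
   offset 0 + 32 = 32, base type D (taken from SRC) and component type B
   (taken from DST).  */
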
2007 /* Update the jump functions associated with call graph edge E when the call
2008 graph edge CS is being inlined, assuming that E->caller is already (possibly
2009 indirectly) inlined into CS->callee and that E has not been inlined. */
2010
2011 static void
2012 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2013 struct cgraph_edge *e)
2014 {
2015 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2016 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2017 int count = ipa_get_cs_argument_count (args);
2018 int i;
2019
2020 for (i = 0; i < count; i++)
2021 {
2022 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2023
2024 if (dst->type == IPA_JF_ANCESTOR)
2025 {
2026 struct ipa_jump_func *src;
2027 int dst_fid = dst->value.ancestor.formal_id;
2028
2029 /* Variable number of arguments can cause havoc if we try to access
2030 one that does not exist in the inlined edge. So make sure we
2031 don't. */
2032 if (dst_fid >= ipa_get_cs_argument_count (top))
2033 {
2034 dst->type = IPA_JF_UNKNOWN;
2035 continue;
2036 }
2037
2038 src = ipa_get_ith_jump_func (top, dst_fid);
2039
2040 if (src->agg.items
2041 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2042 {
2043 struct ipa_agg_jf_item *item;
2044 int j;
2045
2046 /* Currently we do not produce clobber aggregate jump functions,
2047 replace with merging when we do. */
2048 gcc_assert (!dst->agg.items);
2049
2050 dst->agg.items = vec_safe_copy (src->agg.items);
2051 dst->agg.by_ref = src->agg.by_ref;
2052 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2053 item->offset -= dst->value.ancestor.offset;
2054 }
2055
2056 if (src->type == IPA_JF_KNOWN_TYPE)
2057 combine_known_type_and_ancestor_jfs (src, dst);
2058 else if (src->type == IPA_JF_PASS_THROUGH
2059 && src->value.pass_through.operation == NOP_EXPR)
2060 {
2061 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2062 dst->value.ancestor.agg_preserved &=
2063 src->value.pass_through.agg_preserved;
2064 }
2065 else if (src->type == IPA_JF_ANCESTOR)
2066 {
2067 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2068 dst->value.ancestor.offset += src->value.ancestor.offset;
2069 dst->value.ancestor.agg_preserved &=
2070 src->value.ancestor.agg_preserved;
2071 }
2072 else
2073 dst->type = IPA_JF_UNKNOWN;
2074 }
2075 else if (dst->type == IPA_JF_PASS_THROUGH)
2076 {
2077 struct ipa_jump_func *src;
2078 /* We must check range due to calls with variable number of arguments
2079 and we cannot combine jump functions with operations. */
2080 if (dst->value.pass_through.operation == NOP_EXPR
2081 && (dst->value.pass_through.formal_id
2082 < ipa_get_cs_argument_count (top)))
2083 {
2084 bool agg_p;
2085 int dst_fid = dst->value.pass_through.formal_id;
2086 src = ipa_get_ith_jump_func (top, dst_fid);
2087 agg_p = dst->value.pass_through.agg_preserved;
2088
2089 dst->type = src->type;
2090 dst->value = src->value;
2091
2092 if (src->agg.items
2093 && (agg_p || !src->agg.by_ref))
2094 {
2095 /* Currently we do not produce clobber aggregate jump
2096 functions, replace with merging when we do. */
2097 gcc_assert (!dst->agg.items);
2098
2099 dst->agg.by_ref = src->agg.by_ref;
2100 dst->agg.items = vec_safe_copy (src->agg.items);
2101 }
2102
2103 if (!agg_p)
2104 {
2105 if (dst->type == IPA_JF_PASS_THROUGH)
2106 dst->value.pass_through.agg_preserved = false;
2107 else if (dst->type == IPA_JF_ANCESTOR)
2108 dst->value.ancestor.agg_preserved = false;
2109 }
2110 }
2111 else
2112 dst->type = IPA_JF_UNKNOWN;
2113 }
2114 }
2115 }
2116
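/* A concrete sketch of the composition performed above (hypothetical
   call chain):

     void leaf (struct B *b);                   // edge E carries the jf
     void mid (struct D *d) { leaf (&d->b); }   // ancestor jf, offset of b
     void top (struct D *d) { mid (d); }        // edge CS, NOP pass-through

   When mid is inlined into top, E's ancestor jump function is rewritten
   to refer to top's parameter d (its formal_id is taken from CS's
   pass-through); had CS carried an ancestor function instead, the two
   offsets would simply have been added together.  */
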
2117 /* If TARGET is an ADDR_EXPR of a FUNCTION_DECL, or a FUNCTION_DECL itself,
2118 make it the destination of the indirect edge IE and return the edge.
2119 Otherwise, return NULL. */
2119
2120 struct cgraph_edge *
2121 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2122 {
2123 struct cgraph_node *callee;
2124 struct inline_edge_summary *es = inline_edge_summary (ie);
2125
2126 if (TREE_CODE (target) == ADDR_EXPR)
2127 target = TREE_OPERAND (target, 0);
2128 if (TREE_CODE (target) != FUNCTION_DECL)
2129 return NULL;
2130 callee = cgraph_get_node (target);
2131 if (!callee)
2132 return NULL;
2133 ipa_check_create_node_params ();
2134
2135 /* We cannot make edges to inline clones. It is a bug if someone removed
2136 the cgraph node too early. */
2137 gcc_assert (!callee->global.inlined_to);
2138
2139 cgraph_make_edge_direct (ie, callee);
2140 es = inline_edge_summary (ie);
2141 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2142 - eni_size_weights.call_cost);
2143 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2144 - eni_time_weights.call_cost);
2145 if (dump_file)
2146 {
2147 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2148 "(%s/%i -> %s/%i), for stmt ",
2149 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2150 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2151 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2152 if (ie->call_stmt)
2153 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2154 else
2155 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2156 }
2157 callee = cgraph_function_or_thunk_node (callee, NULL);
2158
2159 return ie;
2160 }
2161
2162 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2163 return NULL if there is none. BY_REF specifies whether the value has to
2164 be passed by reference or by value. */
2165
2166 tree
2167 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2168 HOST_WIDE_INT offset, bool by_ref)
2169 {
2170 struct ipa_agg_jf_item *item;
2171 int i;
2172
2173 if (by_ref != agg->by_ref)
2174 return NULL;
2175
2176 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2177 if (item->offset == offset)
2178 {
2179 /* Currently we do not have clobber values, return NULL for them once
2180 we do. */
2181 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2182 return item->value;
2183 }
2184 return NULL;
2185 }
2186
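/* For instance, if AGG describes an aggregate passed by value whose items
   are { offset 0: 1, offset 32: 7 }, a query with OFFSET 32 and BY_REF
   false yields the constant 7, whereas any by-reference query returns
   NULL because the passing conventions do not match.  */
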
2187 /* Try to find a destination for indirect edge IE that corresponds to a simple
2188 call or a call of a member function pointer and where the destination is a
2189 pointer formal parameter described by jump function JFUNC. If it can be
2190 determined, return the newly direct edge, otherwise return NULL.
2191 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2192
2193 static struct cgraph_edge *
2194 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2195 struct ipa_jump_func *jfunc,
2196 struct ipa_node_params *new_root_info)
2197 {
2198 tree target;
2199
2200 if (ie->indirect_info->agg_contents)
2201 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2202 ie->indirect_info->offset,
2203 ie->indirect_info->by_ref);
2204 else
2205 target = ipa_value_from_jfunc (new_root_info, jfunc);
2206 if (!target)
2207 return NULL;
2208 return ipa_make_edge_direct_to_target (ie, target);
2209 }
2210
2211 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2212 call based on a formal parameter which is described by jump function JFUNC
2213 and if it can be determined, make it direct and return the direct edge.
2214 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2215 are relative to. */
2216
2217 static struct cgraph_edge *
2218 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2219 struct ipa_jump_func *jfunc,
2220 struct ipa_node_params *new_root_info)
2221 {
2222 tree binfo, target;
2223
2224 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2225
2226 if (!binfo || TREE_CODE (binfo) != TREE_BINFO)
2227 return NULL;
2228
2229 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2230 ie->indirect_info->otr_type);
2231 if (binfo)
2232 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2233 binfo);
2234 else
2235 return NULL;
2236
2237 if (target)
2238 return ipa_make_edge_direct_to_target (ie, target);
2239 else
2240 return NULL;
2241 }
2242
2243 /* Update the param called notes associated with NODE when CS is being inlined,
2244 assuming NODE is (potentially indirectly) inlined into CS->callee.
2245 Moreover, if the callee is discovered to be constant, create a new cgraph
2246 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2247 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2248
2249 static bool
2250 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2251 struct cgraph_node *node,
2252 vec<cgraph_edge_p> *new_edges)
2253 {
2254 struct ipa_edge_args *top;
2255 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2256 struct ipa_node_params *new_root_info;
2257 bool res = false;
2258
2259 ipa_check_create_edge_args ();
2260 top = IPA_EDGE_REF (cs);
2261 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2262 ? cs->caller->global.inlined_to
2263 : cs->caller);
2264
2265 for (ie = node->indirect_calls; ie; ie = next_ie)
2266 {
2267 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2268 struct ipa_jump_func *jfunc;
2269 int param_index;
2270
2271 next_ie = ie->next_callee;
2272
2273 if (ici->param_index == -1)
2274 continue;
2275
2276 /* We must check range due to calls with variable number of arguments. */
2277 if (ici->param_index >= ipa_get_cs_argument_count (top))
2278 {
2279 ici->param_index = -1;
2280 continue;
2281 }
2282
2283 param_index = ici->param_index;
2284 jfunc = ipa_get_ith_jump_func (top, param_index);
2285 if (jfunc->type == IPA_JF_PASS_THROUGH
2286 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2287 {
2288 if (ici->agg_contents
2289 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2290 ici->param_index = -1;
2291 else
2292 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2293 }
2294 else if (jfunc->type == IPA_JF_ANCESTOR)
2295 {
2296 if (ici->agg_contents
2297 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2298 ici->param_index = -1;
2299 else
2300 {
2301 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2302 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2303 }
2304 }
2305 else
2306 /* Either we can find a destination for this edge now or never. */
2307 ici->param_index = -1;
2308
2309 if (!flag_indirect_inlining)
2310 continue;
2311
2312 if (ici->polymorphic)
2313 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2314 new_root_info);
2315 else
2316 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2317 new_root_info);
2318
2319 if (new_direct_edge)
2320 {
2321 new_direct_edge->indirect_inlining_edge = 1;
2322 if (new_direct_edge->call_stmt)
2323 new_direct_edge->call_stmt_cannot_inline_p
2324 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2325 new_direct_edge->callee->symbol.decl);
2326 if (new_edges)
2327 {
2328 new_edges->safe_push (new_direct_edge);
2329 top = IPA_EDGE_REF (cs);
2330 res = true;
2331 }
2332 }
2333 }
2334
2335 return res;
2336 }
2337
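/* For example, if NODE contains "call *p_3" where p is its second formal
   (ici->param_index == 1) and the jump function for that argument on the
   inlined edge is a simple NOP pass-through of the caller's fourth
   parameter, ici->param_index becomes 3 and the call may later be
   resolved from the caller's arguments.  If the jump function is neither
   a pass-through nor an ancestor, the index is reset to -1 and the edge
   is given up on for good.  */
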
2338 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
2339 cgraph_edges when CS has been inlined and invoke
2340 update_indirect_edges_after_inlining on all nodes and
2341 update_jump_functions_after_inlining on all non-inlined edges that lead out
2342 of this subtree. Newly discovered indirect edges will be added to
2343 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2344 created. */
2345
2346 static bool
2347 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2348 struct cgraph_node *node,
2349 vec<cgraph_edge_p> *new_edges)
2350 {
2351 struct cgraph_edge *e;
2352 bool res;
2353
2354 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2355
2356 for (e = node->callees; e; e = e->next_callee)
2357 if (!e->inline_failed)
2358 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2359 else
2360 update_jump_functions_after_inlining (cs, e);
2361 for (e = node->indirect_calls; e; e = e->next_callee)
2362 update_jump_functions_after_inlining (cs, e);
2363
2364 return res;
2365 }
2366
2367 /* Update jump functions and call note functions on inlining the call site CS.
2368 CS is expected to lead to a node already cloned by
2369 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2370 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
2371 created. */
2372
2373 bool
2374 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2375 vec<cgraph_edge_p> *new_edges)
2376 {
2377 bool changed;
2378 /* Do nothing if the preparation phase has not been carried out yet
2379 (i.e. during early inlining). */
2380 if (!ipa_node_params_vector.exists ())
2381 return false;
2382 gcc_assert (ipa_edge_args_vector);
2383
2384 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2385
2386 /* We do not keep jump functions of inlined edges up to date. Better to free
2387 them so we do not access them accidentally. */
2388 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2389 return changed;
2390 }
2391
2392 /* Frees all dynamically allocated structures that the argument info points
2393 to. */
2394
2395 void
2396 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2397 {
2398 vec_free (args->jump_functions);
2399 memset (args, 0, sizeof (*args));
2400 }
2401
2402 /* Free all ipa_edge_args structures. */
2403
2404 void
2405 ipa_free_all_edge_args (void)
2406 {
2407 int i;
2408 struct ipa_edge_args *args;
2409
2410 if (!ipa_edge_args_vector)
2411 return;
2412
2413 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2414 ipa_free_edge_args_substructures (args);
2415
2416 vec_free (ipa_edge_args_vector);
2417 }
2418
2419 /* Frees all dynamically allocated structures that the param info points
2420 to. */
2421
2422 void
2423 ipa_free_node_params_substructures (struct ipa_node_params *info)
2424 {
2425 info->descriptors.release ();
2426 free (info->lattices);
2427 /* Lattice values and their sources are deallocated with their allocation
2428 pool. */
2429 info->known_vals.release ();
2430 memset (info, 0, sizeof (*info));
2431 }
2432
2433 /* Free all ipa_node_params structures. */
2434
2435 void
2436 ipa_free_all_node_params (void)
2437 {
2438 int i;
2439 struct ipa_node_params *info;
2440
2441 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2442 ipa_free_node_params_substructures (info);
2443
2444 ipa_node_params_vector.release ();
2445 }
2446
2447 /* Set the aggregate replacements of NODE to be AGGVALS. */
2448
2449 void
2450 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2451 struct ipa_agg_replacement_value *aggvals)
2452 {
2453 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2454 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2455
2456 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2457 }
2458
2459 /* Hook that is called by cgraph.c when an edge is removed. */
2460
2461 static void
2462 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2463 {
2464 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2465 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2466 return;
2467 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2468 }
2469
2470 /* Hook that is called by cgraph.c when a node is removed. */
2471
2472 static void
2473 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2474 {
2475 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2476 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2477 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2478 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2479 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2480 }
2481
2482 /* Hook that is called by cgraph.c when an edge is duplicated. */
2483
2484 static void
2485 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2486 __attribute__((unused)) void *data)
2487 {
2488 struct ipa_edge_args *old_args, *new_args;
2489 unsigned int i;
2490
2491 ipa_check_create_edge_args ();
2492
2493 old_args = IPA_EDGE_REF (src);
2494 new_args = IPA_EDGE_REF (dst);
2495
2496 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2497
2498 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2499 (*new_args->jump_functions)[i].agg.items
2500 = vec_safe_copy ((*old_args->jump_functions)[i].agg.items);
2501 }
2502
2503 /* Hook that is called by cgraph.c when a node is duplicated. */
2504
2505 static void
2506 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2507 ATTRIBUTE_UNUSED void *data)
2508 {
2509 struct ipa_node_params *old_info, *new_info;
2510 struct ipa_agg_replacement_value *old_av, *new_av;
2511
2512 ipa_check_create_node_params ();
2513 old_info = IPA_NODE_REF (src);
2514 new_info = IPA_NODE_REF (dst);
2515
2516 new_info->descriptors = old_info->descriptors.copy ();
2517 new_info->lattices = NULL;
2518 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2519
2520 new_info->uses_analysis_done = old_info->uses_analysis_done;
2521 new_info->node_enqueued = old_info->node_enqueued;
2522
2523 old_av = ipa_get_agg_replacements_for_node (src);
2524 if (!old_av)
2525 return;
2526
2527 new_av = NULL;
2528 while (old_av)
2529 {
2530 struct ipa_agg_replacement_value *v;
2531
2532 v = ggc_alloc_ipa_agg_replacement_value ();
2533 memcpy (v, old_av, sizeof (*v));
2534 v->next = new_av;
2535 new_av = v;
2536 old_av = old_av->next;
2537 }
2538 ipa_set_node_agg_value_chain (dst, new_av);
2539 }
2540
2541
2542 /* Analyze a function newly added to the callgraph. */
2543
2544 static void
2545 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2546 {
2547 ipa_analyze_node (node);
2548 }
2549
2550 /* Register our cgraph hooks if they are not already there. */
2551
2552 void
2553 ipa_register_cgraph_hooks (void)
2554 {
2555 if (!edge_removal_hook_holder)
2556 edge_removal_hook_holder =
2557 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2558 if (!node_removal_hook_holder)
2559 node_removal_hook_holder =
2560 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2561 if (!edge_duplication_hook_holder)
2562 edge_duplication_hook_holder =
2563 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2564 if (!node_duplication_hook_holder)
2565 node_duplication_hook_holder =
2566 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2567 function_insertion_hook_holder =
2568 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2569 }
2570
2571 /* Unregister our cgraph hooks. */
2572
2573 static void
2574 ipa_unregister_cgraph_hooks (void)
2575 {
2576 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2577 edge_removal_hook_holder = NULL;
2578 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2579 node_removal_hook_holder = NULL;
2580 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2581 edge_duplication_hook_holder = NULL;
2582 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2583 node_duplication_hook_holder = NULL;
2584 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2585 function_insertion_hook_holder = NULL;
2586 }
2587
2588 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2589 longer needed after ipa-cp. */
2590
2591 void
2592 ipa_free_all_structures_after_ipa_cp (void)
2593 {
2594 if (!optimize)
2595 {
2596 ipa_free_all_edge_args ();
2597 ipa_free_all_node_params ();
2598 free_alloc_pool (ipcp_sources_pool);
2599 free_alloc_pool (ipcp_values_pool);
2600 free_alloc_pool (ipcp_agg_lattice_pool);
2601 ipa_unregister_cgraph_hooks ();
2602 }
2603 }
2604
2605 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2606 longer needed after indirect inlining. */
2607
2608 void
2609 ipa_free_all_structures_after_iinln (void)
2610 {
2611 ipa_free_all_edge_args ();
2612 ipa_free_all_node_params ();
2613 ipa_unregister_cgraph_hooks ();
2614 if (ipcp_sources_pool)
2615 free_alloc_pool (ipcp_sources_pool);
2616 if (ipcp_values_pool)
2617 free_alloc_pool (ipcp_values_pool);
2618 if (ipcp_agg_lattice_pool)
2619 free_alloc_pool (ipcp_agg_lattice_pool);
2620 }
2621
2622 /* Print the parameter descriptors of function NODE to F. */
2624
2625 void
2626 ipa_print_node_params (FILE *f, struct cgraph_node *node)
2627 {
2628 int i, count;
2629 tree temp;
2630 struct ipa_node_params *info;
2631
2632 if (!node->analyzed)
2633 return;
2634 info = IPA_NODE_REF (node);
2635 fprintf (f, " function %s parameter descriptors:\n",
2636 cgraph_node_name (node));
2637 count = ipa_get_param_count (info);
2638 for (i = 0; i < count; i++)
2639 {
2640 temp = ipa_get_param (info, i);
2641 if (TREE_CODE (temp) == PARM_DECL)
2642 fprintf (f, " param %d : %s", i,
2643 (DECL_NAME (temp)
2644 ? (*lang_hooks.decl_printable_name) (temp, 2)
2645 : "(unnamed)"));
2646 if (ipa_is_param_used (info, i))
2647 fprintf (f, " used");
2648 fprintf (f, "\n");
2649 }
2650 }
2651
2652 /* Print ipa_tree_map data structures of all functions in the
2653 callgraph to F. */
2654
2655 void
2656 ipa_print_all_params (FILE * f)
2657 {
2658 struct cgraph_node *node;
2659
2660 fprintf (f, "\nFunction parameters:\n");
2661 FOR_EACH_FUNCTION (node)
2662 ipa_print_node_params (f, node);
2663 }
2664
2665 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2666
2667 vec<tree>
2668 ipa_get_vector_of_formal_parms (tree fndecl)
2669 {
2670 vec<tree> args;
2671 int count;
2672 tree parm;
2673
2674 count = count_formal_params (fndecl);
2675 args.create (count);
2676 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2677 args.quick_push (parm);
2678
2679 return args;
2680 }
2681
2682 /* Return a heap allocated vector containing types of formal parameters of
2683 function type FNTYPE. */
2684
2685 static inline vec<tree>
2686 get_vector_of_formal_parm_types (tree fntype)
2687 {
2688 vec<tree> types;
2689 int count = 0;
2690 tree t;
2691
2692 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2693 count++;
2694
2695 types.create (count);
2696 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2697 types.quick_push (TREE_VALUE (t));
2698
2699 return types;
2700 }
2701
2702 /* Modify the function declaration FNDECL and its type according to the plan in
2703 ADJUSTMENTS. It also sets base fields of individual adjustment structures
2704 to reflect the actual parameters being modified, which are determined by the
2705 base_index field. */
2706
2707 void
2708 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2709 const char *synth_parm_prefix)
2710 {
2711 vec<tree> oparms, otypes;
2712 tree orig_type, new_type = NULL;
2713 tree old_arg_types, t, new_arg_types = NULL;
2714 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2715 int i, len = adjustments.length ();
2716 tree new_reversed = NULL;
2717 bool care_for_types, last_parm_void;
2718
2719 if (!synth_parm_prefix)
2720 synth_parm_prefix = "SYNTH";
2721
2722 oparms = ipa_get_vector_of_formal_parms (fndecl);
2723 orig_type = TREE_TYPE (fndecl);
2724 old_arg_types = TYPE_ARG_TYPES (orig_type);
2725
2726 /* The following test is an ugly hack; some functions simply don't have any
2727 arguments in their type. This is probably a bug but well... */
2728 care_for_types = (old_arg_types != NULL_TREE);
2729 if (care_for_types)
2730 {
2731 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2732 == void_type_node);
2733 otypes = get_vector_of_formal_parm_types (orig_type);
2734 if (last_parm_void)
2735 gcc_assert (oparms.length () + 1 == otypes.length ());
2736 else
2737 gcc_assert (oparms.length () == otypes.length ());
2738 }
2739 else
2740 {
2741 last_parm_void = false;
2742 otypes.create (0);
2743 }
2744
2745 for (i = 0; i < len; i++)
2746 {
2747 struct ipa_parm_adjustment *adj;
2748 gcc_assert (link);
2749
2750 adj = &adjustments[i];
2751 parm = oparms[adj->base_index];
2752 adj->base = parm;
2753
2754 if (adj->copy_param)
2755 {
2756 if (care_for_types)
2757 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
2758 new_arg_types);
2759 *link = parm;
2760 link = &DECL_CHAIN (parm);
2761 }
2762 else if (!adj->remove_param)
2763 {
2764 tree new_parm;
2765 tree ptype;
2766
2767 if (adj->by_ref)
2768 ptype = build_pointer_type (adj->type);
2769 else
2770 ptype = adj->type;
2771
2772 if (care_for_types)
2773 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2774
2775 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2776 ptype);
2777 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2778
2779 DECL_ARTIFICIAL (new_parm) = 1;
2780 DECL_ARG_TYPE (new_parm) = ptype;
2781 DECL_CONTEXT (new_parm) = fndecl;
2782 TREE_USED (new_parm) = 1;
2783 DECL_IGNORED_P (new_parm) = 1;
2784 layout_decl (new_parm, 0);
2785
2786 adj->base = parm;
2787 adj->reduction = new_parm;
2788
2789 *link = new_parm;
2790
2791 link = &DECL_CHAIN (new_parm);
2792 }
2793 }
2794
2795 *link = NULL_TREE;
2796
2797 if (care_for_types)
2798 {
2799 new_reversed = nreverse (new_arg_types);
2800 if (last_parm_void)
2801 {
2802 if (new_reversed)
2803 TREE_CHAIN (new_arg_types) = void_list_node;
2804 else
2805 new_reversed = void_list_node;
2806 }
2807 }
2808
2809 /* Use build_distinct_type_copy to preserve as much as possible from the
2810 original type (debug info, attribute lists etc.).
2811 The exception is METHOD_TYPEs, which must have a THIS argument;
2812 when we are asked to remove it, we need to build a new FUNCTION_TYPE
2813 instead. */
2814 if (TREE_CODE (orig_type) != METHOD_TYPE
2815 || (adjustments[0].copy_param
2816 && adjustments[0].base_index == 0))
2817 {
2818 new_type = build_distinct_type_copy (orig_type);
2819 TYPE_ARG_TYPES (new_type) = new_reversed;
2820 }
2821 else
2822 {
2823 new_type
2824 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2825 new_reversed));
2826 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2827 DECL_VINDEX (fndecl) = NULL_TREE;
2828 }
2829
2830 /* When the signature changes, we need to clear the builtin info. */
2831 if (DECL_BUILT_IN (fndecl))
2832 {
2833 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2834 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2835 }
2836
2837 /* This is a new type, not a copy of an old type. Need to reassociate
2838 variants. We can handle everything except the main variant lazily. */
2839 t = TYPE_MAIN_VARIANT (orig_type);
2840 if (orig_type != t)
2841 {
2842 TYPE_MAIN_VARIANT (new_type) = t;
2843 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2844 TYPE_NEXT_VARIANT (t) = new_type;
2845 }
2846 else
2847 {
2848 TYPE_MAIN_VARIANT (new_type) = new_type;
2849 TYPE_NEXT_VARIANT (new_type) = NULL;
2850 }
2851
2852 TREE_TYPE (fndecl) = new_type;
2853 DECL_VIRTUAL_P (fndecl) = 0;
2854 otypes.release ();
2855 oparms.release ();
2856 }
2857
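/* A small worked example (hypothetical adjustment vector): given

     int foo (int a, struct big *b);

   and ADJUSTMENTS consisting of a copy_param entry for a followed by a
   non-copy entry with the type and offset of b->n, the declaration is
   rewritten to roughly

     int foo (int a, int SYNTH.3);

   where the second PARM_DECL is freshly built (create_tmp_var_name adds
   the numbered suffix) and stands for the scalar that used to be loaded
   from b->n; TYPE_ARG_TYPES of the FUNCTION_TYPE is rebuilt to match.  */
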
2858 /* Modify the actual arguments of the function call statement STMT as
2859 indicated in ADJUSTMENTS. If this is a directly recursive call, CS must
2860 be NULL; otherwise it must be the corresponding call graph edge. */
2861
2862 void
2863 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2864 ipa_parm_adjustment_vec adjustments)
2865 {
2866 vec<tree> vargs;
2867 vec<tree, va_gc> **debug_args = NULL;
2868 gimple new_stmt;
2869 gimple_stmt_iterator gsi;
2870 tree callee_decl;
2871 int i, len;
2872
2873 len = adjustments.length ();
2874 vargs.create (len);
2875 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2876
2877 gsi = gsi_for_stmt (stmt);
2878 for (i = 0; i < len; i++)
2879 {
2880 struct ipa_parm_adjustment *adj;
2881
2882 adj = &adjustments[i];
2883
2884 if (adj->copy_param)
2885 {
2886 tree arg = gimple_call_arg (stmt, adj->base_index);
2887
2888 vargs.quick_push (arg);
2889 }
2890 else if (!adj->remove_param)
2891 {
2892 tree expr, base, off;
2893 location_t loc;
2894 unsigned int deref_align;
2895 bool deref_base = false;
2896
2897 /* When we create a new parameter out of the value of the old one, we can
2898 do the following kinds of transformations:
2899
2900 - A scalar passed by reference is converted to a scalar passed by
2901 value. (adj->by_ref is false and the type of the original
2902 actual argument is a pointer to a scalar).
2903
2904 - A part of an aggregate is passed instead of the whole aggregate.
2905 The part can be passed either by value or by reference, this is
2906 determined by value of adj->by_ref. Moreover, the code below
2907 handles both situations when the original aggregate is passed by
2908 value (its type is not a pointer) and when it is passed by
2909 reference (it is a pointer to an aggregate).
2910
2911 When the new argument is passed by reference (adj->by_ref is true)
2912 it must be a part of an aggregate and therefore we form it by
2913 simply taking the address of a reference inside the original
2914 aggregate. */
2915
2916 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2917 base = gimple_call_arg (stmt, adj->base_index);
2918 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
2919 : EXPR_LOCATION (base);
2920
2921 if (TREE_CODE (base) != ADDR_EXPR
2922 && POINTER_TYPE_P (TREE_TYPE (base)))
2923 off = build_int_cst (adj->alias_ptr_type,
2924 adj->offset / BITS_PER_UNIT);
2925 else
2926 {
2927 HOST_WIDE_INT base_offset;
2928 tree prev_base;
2929 bool addrof;
2930
2931 if (TREE_CODE (base) == ADDR_EXPR)
2932 {
2933 base = TREE_OPERAND (base, 0);
2934 addrof = true;
2935 }
2936 else
2937 addrof = false;
2938 prev_base = base;
2939 base = get_addr_base_and_unit_offset (base, &base_offset);
2940 /* Aggregate arguments can have non-invariant addresses. */
2941 if (!base)
2942 {
2943 base = build_fold_addr_expr (prev_base);
2944 off = build_int_cst (adj->alias_ptr_type,
2945 adj->offset / BITS_PER_UNIT);
2946 }
2947 else if (TREE_CODE (base) == MEM_REF)
2948 {
2949 if (!addrof)
2950 {
2951 deref_base = true;
2952 deref_align = TYPE_ALIGN (TREE_TYPE (base));
2953 }
2954 off = build_int_cst (adj->alias_ptr_type,
2955 base_offset
2956 + adj->offset / BITS_PER_UNIT);
2957 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2958 off);
2959 base = TREE_OPERAND (base, 0);
2960 }
2961 else
2962 {
2963 off = build_int_cst (adj->alias_ptr_type,
2964 base_offset
2965 + adj->offset / BITS_PER_UNIT);
2966 base = build_fold_addr_expr (base);
2967 }
2968 }
2969
2970 if (!adj->by_ref)
2971 {
2972 tree type = adj->type;
2973 unsigned int align;
2974 unsigned HOST_WIDE_INT misalign;
2975
2976 if (deref_base)
2977 {
2978 align = deref_align;
2979 misalign = 0;
2980 }
2981 else
2982 {
2983 get_pointer_alignment_1 (base, &align, &misalign);
2984 if (TYPE_ALIGN (type) > align)
2985 align = TYPE_ALIGN (type);
2986 }
2987 misalign += (tree_to_double_int (off)
2988 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2989 * BITS_PER_UNIT);
2990 misalign = misalign & (align - 1);
2991 if (misalign != 0)
2992 align = (misalign & -misalign);
2993 if (align < TYPE_ALIGN (type))
2994 type = build_aligned_type (type, align);
2995 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2996 }
2997 else
2998 {
2999 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3000 expr = build_fold_addr_expr (expr);
3001 }
3002
3003 expr = force_gimple_operand_gsi (&gsi, expr,
3004 adj->by_ref
3005 || is_gimple_reg_type (adj->type),
3006 NULL, true, GSI_SAME_STMT);
3007 vargs.quick_push (expr);
3008 }
3009 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3010 {
3011 unsigned int ix;
3012 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3013 gimple def_temp;
3014
3015 arg = gimple_call_arg (stmt, adj->base_index);
3016 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3017 {
3018 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3019 continue;
3020 arg = fold_convert_loc (gimple_location (stmt),
3021 TREE_TYPE (origin), arg);
3022 }
3023 if (debug_args == NULL)
3024 debug_args = decl_debug_args_insert (callee_decl);
3025 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3026 if (ddecl == origin)
3027 {
3028 ddecl = (**debug_args)[ix + 1];
3029 break;
3030 }
3031 if (ddecl == NULL)
3032 {
3033 ddecl = make_node (DEBUG_EXPR_DECL);
3034 DECL_ARTIFICIAL (ddecl) = 1;
3035 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3036 DECL_MODE (ddecl) = DECL_MODE (origin);
3037
3038 vec_safe_push (*debug_args, origin);
3039 vec_safe_push (*debug_args, ddecl);
3040 }
3041 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3042 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3043 }
3044 }
3045
3046 if (dump_file && (dump_flags & TDF_DETAILS))
3047 {
3048 fprintf (dump_file, "replacing stmt:");
3049 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3050 }
3051
3052 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3053 vargs.release ();
3054 if (gimple_call_lhs (stmt))
3055 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3056
3057 gimple_set_block (new_stmt, gimple_block (stmt));
3058 if (gimple_has_location (stmt))
3059 gimple_set_location (new_stmt, gimple_location (stmt));
3060 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3061 gimple_call_copy_flags (new_stmt, stmt);
3062
3063 if (dump_file && (dump_flags & TDF_DETAILS))
3064 {
3065 fprintf (dump_file, "with stmt:");
3066 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3067 fprintf (dump_file, "\n");
3068 }
3069 gsi_replace (&gsi, new_stmt, true);
3070 if (cs)
3071 cgraph_set_call_stmt (cs, new_stmt);
3072 update_ssa (TODO_update_ssa);
3073 free_dominance_info (CDI_DOMINATORS);
3074 }
3075
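/* Continuing the hypothetical example above at a call site: under the
   same adjustments, a call

     foo (5, &s);

   is replaced by roughly

     tmp_4 = MEM[(int *)&s + 4B];
     foo (5, tmp_4);

   with the load materialized by force_gimple_operand_gsi just before the
   new call statement, and a debug bind emitted for the removed pointer
   argument when debug statements are enabled.  */
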
3076 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3077
3078 static bool
3079 index_in_adjustments_multiple_times_p (int base_index,
3080 ipa_parm_adjustment_vec adjustments)
3081 {
3082 int i, len = adjustments.length ();
3083 bool one = false;
3084
3085 for (i = 0; i < len; i++)
3086 {
3087 struct ipa_parm_adjustment *adj;
3088 adj = &adjustments[i];
3089
3090 if (adj->base_index == base_index)
3091 {
3092 if (one)
3093 return true;
3094 else
3095 one = true;
3096 }
3097 }
3098 return false;
3099 }
3100
3101
3102 /* Return adjustments that should have the same effect on function parameters
3103 and call arguments as if they were first changed according to adjustments in
3104 INNER and then by adjustments in OUTER. */
3105
3106 ipa_parm_adjustment_vec
3107 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3108 ipa_parm_adjustment_vec outer)
3109 {
3110 int i, outlen = outer.length ();
3111 int inlen = inner.length ();
3112 int removals = 0;
3113 ipa_parm_adjustment_vec adjustments, tmp;
3114
3115 tmp.create (inlen);
3116 for (i = 0; i < inlen; i++)
3117 {
3118 struct ipa_parm_adjustment *n;
3119 n = &inner[i];
3120
3121 if (n->remove_param)
3122 removals++;
3123 else
3124 tmp.quick_push (*n);
3125 }
3126
3127 adjustments.create (outlen + removals);
3128 for (i = 0; i < outlen; i++)
3129 {
3130 struct ipa_parm_adjustment r;
3131 struct ipa_parm_adjustment *out = &outer[i];
3132 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3133
3134 memset (&r, 0, sizeof (r));
3135 gcc_assert (!in->remove_param);
3136 if (out->remove_param)
3137 {
3138 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3139 {
3140 r.remove_param = true;
3141 adjustments.quick_push (r);
3142 }
3143 continue;
3144 }
3145
3146 r.base_index = in->base_index;
3147 r.type = out->type;
3148
3149 /* FIXME: Create nonlocal value too. */
3150
3151 if (in->copy_param && out->copy_param)
3152 r.copy_param = true;
3153 else if (in->copy_param)
3154 r.offset = out->offset;
3155 else if (out->copy_param)
3156 r.offset = in->offset;
3157 else
3158 r.offset = in->offset + out->offset;
3159 adjustments.quick_push (r);
3160 }
3161
3162 for (i = 0; i < inlen; i++)
3163 {
3164 struct ipa_parm_adjustment *n = &inner[i];
3165
3166 if (n->remove_param)
3167 adjustments.quick_push (*n);
3168 }
3169
3170 tmp.release ();
3171 return adjustments;
3172 }
3173
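/* A worked example of the composition (hypothetical vectors): if INNER
   turned (a, b, c) into (a, c) by removing b, and OUTER then removes the
   second remaining parameter, the combined vector removes both b and c
   from the original signature.  Offsets compose additively: an INNER
   adjustment extracting a piece at offset 32 followed by an OUTER
   adjustment taking offset 64 within that piece yields one adjustment at
   offset 32 + 64 = 96.  */
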
3174 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
3175 human-friendly way, assuming they are meant to be applied to FNDECL. */
3176
3177 void
3178 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3179 tree fndecl)
3180 {
3181 int i, len = adjustments.length ();
3182 bool first = true;
3183 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3184
3185 fprintf (file, "IPA param adjustments: ");
3186 for (i = 0; i < len; i++)
3187 {
3188 struct ipa_parm_adjustment *adj;
3189 adj = &adjustments[i];
3190
3191 if (!first)
3192 fprintf (file, " ");
3193 else
3194 first = false;
3195
3196 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3197 print_generic_expr (file, parms[adj->base_index], 0);
3198 if (adj->base)
3199 {
3200 fprintf (file, ", base: ");
3201 print_generic_expr (file, adj->base, 0);
3202 }
3203 if (adj->reduction)
3204 {
3205 fprintf (file, ", reduction: ");
3206 print_generic_expr (file, adj->reduction, 0);
3207 }
3208 if (adj->new_ssa_base)
3209 {
3210 fprintf (file, ", new_ssa_base: ");
3211 print_generic_expr (file, adj->new_ssa_base, 0);
3212 }
3213
3214 if (adj->copy_param)
3215 fprintf (file, ", copy_param");
3216 else if (adj->remove_param)
3217 fprintf (file, ", remove_param");
3218 else
3219 fprintf (file, ", offset %li", (long) adj->offset);
3220 if (adj->by_ref)
3221 fprintf (file, ", by_ref");
3222 print_node_brief (file, ", type: ", adj->type, 0);
3223 fprintf (file, "\n");
3224 }
3225 parms.release ();
3226 }
3227
3228 /* Dump the linked list of aggregate replacement values AV to F. */
3229
3230 void
3231 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3232 {
3233 bool comma = false;
3234 fprintf (f, " Aggregate replacements:");
3235 for (; av; av = av->next)
3236 {
3237 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3238 av->index, av->offset);
3239 print_generic_expr (f, av->value, 0);
3240 comma = true;
3241 }
3242 fprintf (f, "\n");
3243 }
3244
3245 /* Stream out jump function JUMP_FUNC to OB. */
3246
3247 static void
3248 ipa_write_jump_function (struct output_block *ob,
3249 struct ipa_jump_func *jump_func)
3250 {
3251 struct ipa_agg_jf_item *item;
3252 struct bitpack_d bp;
3253 int i, count;
3254
3255 streamer_write_uhwi (ob, jump_func->type);
3256 switch (jump_func->type)
3257 {
3258 case IPA_JF_UNKNOWN:
3259 break;
3260 case IPA_JF_KNOWN_TYPE:
3261 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3262 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3263 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3264 break;
3265 case IPA_JF_CONST:
3266 gcc_assert (
3267 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3268 stream_write_tree (ob, jump_func->value.constant, true);
3269 break;
3270 case IPA_JF_PASS_THROUGH:
3271 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3272 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3273 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3274 bp = bitpack_create (ob->main_stream);
3275 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3276 streamer_write_bitpack (&bp);
3277 break;
3278 case IPA_JF_ANCESTOR:
3279 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3280 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3281 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3282 bp = bitpack_create (ob->main_stream);
3283 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3284 streamer_write_bitpack (&bp);
3285 break;
3286 }
3287
3288 count = vec_safe_length (jump_func->agg.items);
3289 streamer_write_uhwi (ob, count);
3290 if (count)
3291 {
3292 bp = bitpack_create (ob->main_stream);
3293 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3294 streamer_write_bitpack (&bp);
3295 }
3296
3297 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3298 {
3299 streamer_write_uhwi (ob, item->offset);
3300 stream_write_tree (ob, item->value, true);
3301 }
3302 }
3303
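/* The stream layout produced above, and consumed by ipa_read_jump_function
   below, is therefore:

     uhwi: jump function type
     type-specific payload (trees, formal_ids, and a bitpacked
       agg_preserved flag for pass-through and ancestor functions)
     uhwi: number of aggregate items
     if non-zero: a bitpacked by_ref flag, then for each item its
       uhwi offset followed by its value tree  */
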
3304 /* Read in jump function JUMP_FUNC from IB. */
3305
3306 static void
3307 ipa_read_jump_function (struct lto_input_block *ib,
3308 struct ipa_jump_func *jump_func,
3309 struct data_in *data_in)
3310 {
3311 struct bitpack_d bp;
3312 int i, count;
3313
3314 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3315 switch (jump_func->type)
3316 {
3317 case IPA_JF_UNKNOWN:
3318 break;
3319 case IPA_JF_KNOWN_TYPE:
3320 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3321 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3322 jump_func->value.known_type.component_type = stream_read_tree (ib,
3323 data_in);
3324 break;
3325 case IPA_JF_CONST:
3326 jump_func->value.constant = stream_read_tree (ib, data_in);
3327 break;
3328 case IPA_JF_PASS_THROUGH:
3329 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3330 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3331 jump_func->value.pass_through.operation
3332 = (enum tree_code) streamer_read_uhwi (ib);
3333 bp = streamer_read_bitpack (ib);
3334 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3335 break;
3336 case IPA_JF_ANCESTOR:
3337 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3338 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3339 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3340 bp = streamer_read_bitpack (ib);
3341 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3342 break;
3343 }
3344
3345 count = streamer_read_uhwi (ib);
3346 vec_alloc (jump_func->agg.items, count);
3347 if (count)
3348 {
3349 bp = streamer_read_bitpack (ib);
3350 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3351 }
3352 for (i = 0; i < count; i++)
3353 {
3354 struct ipa_agg_jf_item item;
3355 item.offset = streamer_read_uhwi (ib);
3356 item.value = stream_read_tree (ib, data_in);
3357 jump_func->agg.items->quick_push (item);
3358 }
3359 }
3360
3361 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3362 relevant to indirect inlining to OB. */
3363
3364 static void
3365 ipa_write_indirect_edge_info (struct output_block *ob,
3366 struct cgraph_edge *cs)
3367 {
3368 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3369 struct bitpack_d bp;
3370
3371 streamer_write_hwi (ob, ii->param_index);
3372 streamer_write_hwi (ob, ii->offset);
3373 bp = bitpack_create (ob->main_stream);
3374 bp_pack_value (&bp, ii->polymorphic, 1);
3375 bp_pack_value (&bp, ii->agg_contents, 1);
3376 bp_pack_value (&bp, ii->by_ref, 1);
3377 streamer_write_bitpack (&bp);
3378
3379 if (ii->polymorphic)
3380 {
3381 streamer_write_hwi (ob, ii->otr_token);
3382 stream_write_tree (ob, ii->otr_type, true);
3383 }
3384 }
3385
3386 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3387 relevant to indirect inlining from IB. */
3388
3389 static void
3390 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3391 struct data_in *data_in ATTRIBUTE_UNUSED,
3392 struct cgraph_edge *cs)
3393 {
3394 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3395 struct bitpack_d bp;
3396
3397 ii->param_index = (int) streamer_read_hwi (ib);
3398 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3399 bp = streamer_read_bitpack (ib);
3400 ii->polymorphic = bp_unpack_value (&bp, 1);
3401 ii->agg_contents = bp_unpack_value (&bp, 1);
3402 ii->by_ref = bp_unpack_value (&bp, 1);
3403 if (ii->polymorphic)
3404 {
3405 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3406 ii->otr_type = stream_read_tree (ib, data_in);
3407 }
3408 }
3409
3410 /* Stream out NODE info to OB. */
3411
3412 static void
3413 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3414 {
3415 int node_ref;
3416 lto_symtab_encoder_t encoder;
3417 struct ipa_node_params *info = IPA_NODE_REF (node);
3418 int j;
3419 struct cgraph_edge *e;
3420 struct bitpack_d bp;
3421
3422 encoder = ob->decl_state->symtab_node_encoder;
3423 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3424 streamer_write_uhwi (ob, node_ref);
3425
3426 bp = bitpack_create (ob->main_stream);
3427 gcc_assert (info->uses_analysis_done
3428 || ipa_get_param_count (info) == 0);
3429 gcc_assert (!info->node_enqueued);
3430 gcc_assert (!info->ipcp_orig_node);
3431 for (j = 0; j < ipa_get_param_count (info); j++)
3432 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3433 streamer_write_bitpack (&bp);
3434 for (e = node->callees; e; e = e->next_callee)
3435 {
3436 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3437
3438 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3439 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3440 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3441 }
3442 for (e = node->indirect_calls; e; e = e->next_callee)
3443 {
3444 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3445
3446 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3447 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3448 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3449 ipa_write_indirect_edge_info (ob, e);
3450 }
3451 }
3452
3453 /* Stream in NODE info from IB. */
3454
3455 static void
3456 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3457 struct data_in *data_in)
3458 {
3459 struct ipa_node_params *info = IPA_NODE_REF (node);
3460 int k;
3461 struct cgraph_edge *e;
3462 struct bitpack_d bp;
3463
3464 ipa_initialize_node_params (node);
3465
3466 bp = streamer_read_bitpack (ib);
3467 if (ipa_get_param_count (info) != 0)
3468 info->uses_analysis_done = true;
3469 info->node_enqueued = false;
3470 for (k = 0; k < ipa_get_param_count (info); k++)
3471 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3472 for (e = node->callees; e; e = e->next_callee)
3473 {
3474 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3475 int count = streamer_read_uhwi (ib);
3476
3477 if (!count)
3478 continue;
3479 vec_safe_grow_cleared (args->jump_functions, count);
3480
3481 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3482 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3483 }
3484 for (e = node->indirect_calls; e; e = e->next_callee)
3485 {
3486 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3487 int count = streamer_read_uhwi (ib);
3488
3489 if (count)
3490 {
3491 vec_safe_grow_cleared (args->jump_functions, count);
3492 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3493 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3494 data_in);
3495 }
3496 ipa_read_indirect_edge_info (ib, data_in, e);
3497 }
3498 }
3499
3500 /* Write jump functions for the functions in the current LTO partition. */
3501
3502 void
3503 ipa_prop_write_jump_functions (void)
3504 {
3505 struct cgraph_node *node;
3506 struct output_block *ob;
3507 unsigned int count = 0;
3508 lto_symtab_encoder_iterator lsei;
3509 lto_symtab_encoder_t encoder;
3510
3511
3512 if (!ipa_node_params_vector.exists ())
3513 return;
3514
3515 ob = create_output_block (LTO_section_jump_functions);
3516 encoder = ob->decl_state->symtab_node_encoder;
3517 ob->cgraph_node = NULL;
3518 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3519 lsei_next_function_in_partition (&lsei))
3520 {
3521 node = lsei_cgraph_node (lsei);
3522 if (cgraph_function_with_gimple_body_p (node)
3523 && IPA_NODE_REF (node) != NULL)
3524 count++;
3525 }
3526
3527 streamer_write_uhwi (ob, count);
3528
3529 /* Process all of the functions. */
3530 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3531 lsei_next_function_in_partition (&lsei))
3532 {
3533 node = lsei_cgraph_node (lsei);
3534 if (cgraph_function_with_gimple_body_p (node)
3535 && IPA_NODE_REF (node) != NULL)
3536 ipa_write_node_info (ob, node);
3537 }
3538 streamer_write_char_stream (ob->main_stream, 0);
3539 produce_asm (ob, NULL);
3540 destroy_output_block (ob);
3541 }
3542
3543 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
3544
3545 static void
3546 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3547 size_t len)
3548 {
3549 const struct lto_function_header *header =
3550 (const struct lto_function_header *) data;
3551 const int cfg_offset = sizeof (struct lto_function_header);
3552 const int main_offset = cfg_offset + header->cfg_size;
3553 const int string_offset = main_offset + header->main_size;
3554 struct data_in *data_in;
3555 struct lto_input_block ib_main;
3556 unsigned int i;
3557 unsigned int count;
3558
3559 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3560 header->main_size);
3561
3562 data_in =
3563 lto_data_in_create (file_data, (const char *) data + string_offset,
3564 header->string_size, vNULL);
3565 count = streamer_read_uhwi (&ib_main);
3566
3567 for (i = 0; i < count; i++)
3568 {
3569 unsigned int index;
3570 struct cgraph_node *node;
3571 lto_symtab_encoder_t encoder;
3572
3573 index = streamer_read_uhwi (&ib_main);
3574 encoder = file_data->symtab_node_encoder;
3575 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3576 gcc_assert (node->analyzed);
3577 ipa_read_node_info (&ib_main, node, data_in);
3578 }
3579 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3580 len);
3581 lto_data_in_delete (data_in);
3582 }
3583
3584 /* Read ipcp jump functions. */
3585
3586 void
3587 ipa_prop_read_jump_functions (void)
3588 {
3589 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3590 struct lto_file_decl_data *file_data;
3591 unsigned int j = 0;
3592
3593 ipa_check_create_node_params ();
3594 ipa_check_create_edge_args ();
3595 ipa_register_cgraph_hooks ();
3596
3597 while ((file_data = file_data_vec[j++]))
3598 {
3599 size_t len;
3600 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3601
3602 if (data)
3603 ipa_prop_read_section (file_data, data, len);
3604 }
3605 }
3606
3607 /* After merging units, we can get a mismatch in argument counts.
3608 Also decl merging might have rendered parameter lists obsolete.
3609 Also compute called_with_variable_arg info. */
3610
3611 void
3612 ipa_update_after_lto_read (void)
3613 {
3614 struct cgraph_node *node;
3615
3616 ipa_check_create_node_params ();
3617 ipa_check_create_edge_args ();
3618
3619 FOR_EACH_DEFINED_FUNCTION (node)
3620 if (node->analyzed)
3621 ipa_initialize_node_params (node);
3622 }
3623
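/* Stream out the aggregate value replacement chain for NODE to OB. */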
void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);
    }
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_agg_replacement_chain (struct lto_input_block *ib,
			    struct cgraph_node *node,
			    struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;

      av = ggc_alloc_ipa_agg_replacement_value ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      /* Prepending reverses the order of the written chain, which is
	 harmless because lookups scan the whole list.  */
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}

/* Write all aggregate replacements for nodes in the set.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

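/* The LTO_section_ipcp_transform section therefore uses the same framing as
   the jump-function section above: a uhwi count of nodes, one chain per node
   as laid out by write_agg_replacement_chain, and a terminating zero byte.  */
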
/* Read the replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->analyzed);
      read_agg_replacement_chain (&ib_main, node, data_in);
    }
  /* This is the ipcp-transform section, not the jump-function one, so free
     it under its own name.  */
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

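/* For example (hypothetical values, for illustration only): if NODE was
   cloned with combined_args_to_skip = {1} and AGGVAL carries replacements
   for indices 0 and 2, the mapping built below is adj = {0, -1, 1}, so the
   value at index 2 is renumbered to 1 while index 0 stays put.  */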
static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}


/* Function body transformation phase.  */

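/* A sketch of the effect, with hypothetical names: for a clone of

     int f (struct s *p) { return p->x; }

   whose aggregate replacement chain records that the field of P at offset 0
   (index 0) is known to be 4, the load

     _1 = p_2(D)->x;

   is rewritten below into

     _1 = 4;

   assuming the types agree or can be folded as handled further down.  */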
unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor_t> descriptors = vNULL;
  struct param_analysis_info *parms_ainfo;
  struct ipa_agg_replacement_value *aggval;
  gimple_stmt_iterator gsi;
  basic_block bb;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     cgraph_node_name (node), node->uid);

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->symbol.decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	struct ipa_agg_replacement_value *v;
	gimple stmt = gsi_stmt (gsi);
	tree rhs, val, t;
	HOST_WIDE_INT offset;
	int index;
	bool by_ref, vce;

	if (!gimple_assign_load_p (stmt))
	  continue;
	rhs = gimple_assign_rhs1 (stmt);
	if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	  continue;

	vce = false;
	t = rhs;
	while (handled_component_p (t))
	  {
	    /* V_C_E can do things like convert an array of integers to one
	       bigger integer and similar things we do not handle below.
	       Check the component itself, not the outermost RHS, so that
	       nested V_C_Es are caught too.  */
	    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	      {
		vce = true;
		break;
	      }
	    t = TREE_OPERAND (t, 0);
	  }
	if (vce)
	  continue;

	if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
				       rhs, &index, &offset, &by_ref))
	  continue;
	/* Look for a known aggregate value for this parameter and offset.  */
	for (v = aggval; v; v = v->next)
	  if (v->index == index
	      && v->offset == offset)
	    break;
	if (!v)
	  continue;

	gcc_checking_assert (is_gimple_ip_invariant (v->value));
	if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	  {
	    if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	      val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	    else if (TYPE_SIZE (TREE_TYPE (rhs))
		     == TYPE_SIZE (TREE_TYPE (v->value)))
	      val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	    else
	      {
		if (dump_file)
		  {
		    fprintf (dump_file, " const ");
		    print_generic_expr (dump_file, v->value, 0);
		    fprintf (dump_file, " can't be converted to type of ");
		    print_generic_expr (dump_file, rhs, 0);
		    fprintf (dump_file, "\n");
		  }
		continue;
	      }
	  }
	else
	  val = v->value;

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "Modifying stmt:\n ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	  }
	gimple_assign_set_rhs_from_tree (&gsi, val);
	update_stmt (stmt);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file, "into:\n ");
	    print_gimple_stmt (dump_file, stmt, 0, 0);
	    fprintf (dump_file, "\n");
	  }

	something_changed = true;
	if (maybe_clean_eh_stmt (stmt)
	    && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	  cfg_changed = true;
      }

  (*ipa_node_agg_replacements)[node->uid] = NULL;
  free_parms_ainfo (parms_ainfo, param_count);
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}