1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "ipa-inline.h"
34 #include "gimple.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "lto-streamer.h"
39 #include "data-streamer.h"
40 #include "tree-streamer.h"
41 #include "params.h"
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
45
46 struct param_analysis_info
47 {
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
50 };
51
52 /* Vector where the parameter infos are actually stored. */
53 vec<ipa_node_params_t> ipa_node_params_vector;
54 /* Vector of known aggregate values in cloned nodes. */
55 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
56 /* Vector where the edge argument infos are actually stored. */
57 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
58
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
65
66 /* Return index of the formal whose tree is PTREE among the parameter
67 descriptors in DESCRIPTORS. */
68
69 static int
70 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
71 {
72 int i, count;
73
74 count = descriptors.length ();
75 for (i = 0; i < count; i++)
76 if (descriptors[i].decl == ptree)
77 return i;
78
79 return -1;
80 }
81
82 /* Return index of the formal whose tree is PTREE in function which corresponds
83 to INFO. */
84
85 int
86 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
87 {
88 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
89 }
90
91 /* Populate the decl fields in parameter DESCRIPTORS that correspond to
92 NODE. */
93
94 static void
95 ipa_populate_param_decls (struct cgraph_node *node,
96 vec<ipa_param_descriptor_t> &descriptors)
97 {
98 tree fndecl;
99 tree fnargs;
100 tree parm;
101 int param_num;
102
103 fndecl = node->symbol.decl;
104 fnargs = DECL_ARGUMENTS (fndecl);
105 param_num = 0;
106 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
107 {
108 descriptors[param_num].decl = parm;
109 param_num++;
110 }
111 }
112
113 /* Return how many formal parameters FNDECL has. */
114
115 static inline int
116 count_formal_params (tree fndecl)
117 {
118 tree parm;
119 int count = 0;
120
121 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
122 count++;
123
124 return count;
125 }
126
127 /* Initialize the ipa_node_params structure associated with NODE by counting
128 the function parameters, creating the descriptors and populating their
129 param_decls. */
130
131 void
132 ipa_initialize_node_params (struct cgraph_node *node)
133 {
134 struct ipa_node_params *info = IPA_NODE_REF (node);
135
136 if (!info->descriptors.exists ())
137 {
138 int param_count;
139
140 param_count = count_formal_params (node->symbol.decl);
141 if (param_count)
142 {
143 info->descriptors.safe_grow_cleared (param_count);
144 ipa_populate_param_decls (node, info->descriptors);
145 }
146 }
147 }
148
149 /* Print the jump functions associated with call graph edge CS to file F. */
150
151 static void
152 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
153 {
154 int i, count;
155
156 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
157 for (i = 0; i < count; i++)
158 {
159 struct ipa_jump_func *jump_func;
160 enum jump_func_type type;
161
162 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
163 type = jump_func->type;
164
165 fprintf (f, " param %d: ", i);
166 if (type == IPA_JF_UNKNOWN)
167 fprintf (f, "UNKNOWN\n");
168 else if (type == IPA_JF_KNOWN_TYPE)
169 {
170 fprintf (f, "KNOWN TYPE: base ");
171 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
172 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
173 jump_func->value.known_type.offset);
174 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
175 fprintf (f, "\n");
176 }
177 else if (type == IPA_JF_CONST)
178 {
179 tree val = jump_func->value.constant;
180 fprintf (f, "CONST: ");
181 print_generic_expr (f, val, 0);
182 if (TREE_CODE (val) == ADDR_EXPR
183 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
184 {
185 fprintf (f, " -> ");
186 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
187 0);
188 }
189 fprintf (f, "\n");
190 }
191 else if (type == IPA_JF_PASS_THROUGH)
192 {
193 fprintf (f, "PASS THROUGH: ");
194 fprintf (f, "%d, op %s",
195 jump_func->value.pass_through.formal_id,
196 tree_code_name[(int)
197 jump_func->value.pass_through.operation]);
198 if (jump_func->value.pass_through.operation != NOP_EXPR)
199 {
200 fprintf (f, " ");
201 print_generic_expr (f,
202 jump_func->value.pass_through.operand, 0);
203 }
204 if (jump_func->value.pass_through.agg_preserved)
205 fprintf (f, ", agg_preserved");
206 fprintf (f, "\n");
207 }
208 else if (type == IPA_JF_ANCESTOR)
209 {
210 fprintf (f, "ANCESTOR: ");
211 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
212 jump_func->value.ancestor.formal_id,
213 jump_func->value.ancestor.offset);
214 print_generic_expr (f, jump_func->value.ancestor.type, 0);
215 if (jump_func->value.ancestor.agg_preserved)
216 fprintf (f, ", agg_preserved");
217 fprintf (f, "\n");
218 }
219
220 if (jump_func->agg.items)
221 {
222 struct ipa_agg_jf_item *item;
223 int j;
224
225 fprintf (f, " Aggregate passed by %s:\n",
226 jump_func->agg.by_ref ? "reference" : "value");
227 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
228 {
229 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
230 item->offset);
231 if (TYPE_P (item->value))
232 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
233 tree_low_cst (TYPE_SIZE (item->value), 1));
234 else
235 {
236 fprintf (f, "cst: ");
237 print_generic_expr (f, item->value, 0);
238 }
239 fprintf (f, "\n");
240 }
241 }
242 }
243 }
244
245
246 /* Print the jump functions of all arguments on all call graph edges going from
247 NODE to file F. */
248
249 void
250 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
251 {
252 struct cgraph_edge *cs;
253 int i;
254
255 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
256 for (cs = node->callees; cs; cs = cs->next_callee)
257 {
258 if (!ipa_edge_args_info_available_for_edge_p (cs))
259 continue;
260
261 fprintf (f, " callsite %s/%i -> %s/%i : \n",
262 xstrdup (cgraph_node_name (node)), node->uid,
263 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
264 ipa_print_node_jump_functions_for_edge (f, cs);
265 }
266
267 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
268 {
269 if (!ipa_edge_args_info_available_for_edge_p (cs))
270 continue;
271
272 if (cs->call_stmt)
273 {
274 fprintf (f, " indirect callsite %d for stmt ", i);
275 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
276 }
277 else
278 fprintf (f, " indirect callsite %d :\n", i);
279 ipa_print_node_jump_functions_for_edge (f, cs);
280
281 }
282 }
283
284 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
285
286 void
287 ipa_print_all_jump_functions (FILE *f)
288 {
289 struct cgraph_node *node;
290
291 fprintf (f, "\nJump functions:\n");
292 FOR_EACH_FUNCTION (node)
293 {
294 ipa_print_node_jump_functions (f, node);
295 }
296 }
297
298 /* Set JFUNC to be a known type jump function. */
299
300 static void
301 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
302 tree base_type, tree component_type)
303 {
304 jfunc->type = IPA_JF_KNOWN_TYPE;
305 jfunc->value.known_type.offset = offset;
306 jfunc->value.known_type.base_type = base_type;
307 jfunc->value.known_type.component_type = component_type;
308 }
309
310 /* Set JFUNC to be a constant jump function. */
311
312 static void
313 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
314 {
315 constant = unshare_expr (constant);
316 if (constant && EXPR_P (constant))
317 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
318 jfunc->type = IPA_JF_CONST;
319 jfunc->value.constant = unshare_expr_without_location (constant);
320 }
321
322 /* Set JFUNC to be a simple pass-through jump function. */
323 static void
324 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
325 bool agg_preserved)
326 {
327 jfunc->type = IPA_JF_PASS_THROUGH;
328 jfunc->value.pass_through.operand = NULL_TREE;
329 jfunc->value.pass_through.formal_id = formal_id;
330 jfunc->value.pass_through.operation = NOP_EXPR;
331 jfunc->value.pass_through.agg_preserved = agg_preserved;
332 }
333
334 /* Set JFUNC to be an arithmetic pass through jump function. */
335
336 static void
337 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
338 tree operand, enum tree_code operation)
339 {
340 jfunc->type = IPA_JF_PASS_THROUGH;
341 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
342 jfunc->value.pass_through.formal_id = formal_id;
343 jfunc->value.pass_through.operation = operation;
344 jfunc->value.pass_through.agg_preserved = false;
345 }
346
347 /* Set JFUNC to be an ancestor jump function. */
348
349 static void
350 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
351 tree type, int formal_id, bool agg_preserved)
352 {
353 jfunc->type = IPA_JF_ANCESTOR;
354 jfunc->value.ancestor.formal_id = formal_id;
355 jfunc->value.ancestor.offset = offset;
356 jfunc->value.ancestor.type = type;
357 jfunc->value.ancestor.agg_preserved = agg_preserved;
358 }
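
/* For illustration, hypothetical callers (a sketch, not taken from the
surrounding code) that would give rise to the jump function kinds set up by
the helpers above, assuming the usual gimplification of the arguments:

bar (7); IPA_JF_CONST with value 7
bar (a); simple pass-through of formal A
bar (a + 4); arithmetic pass-through: formal A, PLUS_EXPR, 4
bar (&this->D.1748); ancestor: formal THIS plus a constant offset

Known type jump functions are created elsewhere, when the address of a
local object with a reliably known dynamic type is passed. */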
359
360 /* Structure to be passed in between detect_type_change and
361 check_stmt_for_type_change. */
362
363 struct type_change_info
364 {
365 /* Offset into the object at which the virtual method table pointer we are
366 looking for resides. */
367 HOST_WIDE_INT offset;
368 /* The declaration or SSA_NAME pointer of the base that we are checking for
369 type change. */
370 tree object;
371 /* If we actually can tell the type that the object has changed to, it is
372 stored in this field. Otherwise it remains NULL_TREE. */
373 tree known_current_type;
374 /* Set to true if dynamic type change has been detected. */
375 bool type_maybe_changed;
376 /* Set to true if multiple types have been encountered. known_current_type
377 must be disregarded in that case. */
378 bool multiple_types_encountered;
379 };
380
381 /* Return true if STMT can modify a virtual method table pointer.
382
383 This function makes special assumptions about both constructors and
384 destructors which are all the functions that are allowed to alter the VMT
385 pointers. It assumes that destructors begin with assignment into all VMT
386 pointers and that constructors essentially look in the following way:
387
388 1) The very first thing they do is that they call constructors of ancestor
389 sub-objects that have them.
390
391 2) Then the VMT pointers of this and all its ancestors are set to new
392 values corresponding to the type of the constructor.
393
394 3) Only afterwards is everything else run, such as the constructors of
395 member sub-objects and the code written by the user. Only this may include
396 calling virtual functions, directly or indirectly.
397
398 There is no way to call a constructor of an ancestor sub-object in any
399 other way.
400
401 This means that we do not have to care whether constructors get the correct
402 type information because they will always change it (in fact, if we define
403 the type to be given by the VMT pointer, it is undefined).
404
405 The most important fact to derive from the above is that if, for some
406 statement in section 3, we try to detect whether the dynamic type has
407 changed, we can safely ignore all calls as we examine the function body
408 backwards until we reach statements in section 2 because these calls cannot
409 be ancestor constructors or destructors (if the input is not bogus) and so
410 do not change the dynamic type (this holds true only for automatically
411 allocated objects but at the moment we devirtualize only these). We then
412 must detect that statements in section 2 change the dynamic type and can try
413 to derive the new type. That is enough and we can stop, we will never see
414 the calls into constructors of sub-objects in this code. Therefore we can
415 safely ignore all call statements that we traverse.
416 */
417
418 static bool
419 stmt_may_be_vtbl_ptr_store (gimple stmt)
420 {
421 if (is_gimple_call (stmt))
422 return false;
423 else if (is_gimple_assign (stmt))
424 {
425 tree lhs = gimple_assign_lhs (stmt);
426
427 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
428 {
429 if (flag_strict_aliasing
430 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
431 return false;
432
433 if (TREE_CODE (lhs) == COMPONENT_REF
434 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
435 return false;
436 /* In the future we might want to use get_base_ref_and_offset to find
437 if there is a field corresponding to the offset and if so, proceed
438 almost as if it were a component ref. */
439 }
440 }
441 return true;
442 }
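
/* To make the three constructor sections described above concrete, consider
a hypothetical C++ hierarchy (a sketch, not from any real testcase):

struct A { virtual void f (); int i; };
struct B : A { B (); virtual void f (); };

B::B ()
{
1) A::A () runs first and stores the vtable address of A into
this->_vptr,
2) this->_vptr is then overwritten with the vtable address of B,
3) only then does user-written code run, which may call virtual
functions, directly or indirectly.
}

When walking virtual definitions backwards from a statement in section 3,
the stores belonging to section 2 are the ones that
extr_type_from_vtbl_ptr_store below attempts to recognize. */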
443
444 /* If STMT can be proved to be an assignment to the virtual method table
445 pointer of the object described by TCI and the type associated with the new
446 table can be identified, return the type. Otherwise return NULL_TREE. */
447
448 static tree
449 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
450 {
451 HOST_WIDE_INT offset, size, max_size;
452 tree lhs, rhs, base;
453
454 if (!gimple_assign_single_p (stmt))
455 return NULL_TREE;
456
457 lhs = gimple_assign_lhs (stmt);
458 rhs = gimple_assign_rhs1 (stmt);
459 if (TREE_CODE (lhs) != COMPONENT_REF
460 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
461 || TREE_CODE (rhs) != ADDR_EXPR)
462 return NULL_TREE;
463 rhs = get_base_address (TREE_OPERAND (rhs, 0));
464 if (!rhs
465 || TREE_CODE (rhs) != VAR_DECL
466 || !DECL_VIRTUAL_P (rhs))
467 return NULL_TREE;
468
469 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
470 if (offset != tci->offset
471 || size != POINTER_SIZE
472 || max_size != POINTER_SIZE)
473 return NULL_TREE;
474 if (TREE_CODE (base) == MEM_REF)
475 {
476 if (TREE_CODE (tci->object) != MEM_REF
477 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
478 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
479 TREE_OPERAND (base, 1)))
480 return NULL_TREE;
481 }
482 else if (tci->object != base)
483 return NULL_TREE;
484
485 return DECL_CONTEXT (rhs);
486 }
487
488 /* Callback of walk_aliased_vdefs and a helper function for
489 detect_type_change to check whether a particular statement may modify
490 the virtual table pointer, and if possible also determine the new type of
491 the (sub-)object. It stores its result into DATA, which points to a
492 type_change_info structure. */
493
494 static bool
495 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
496 {
497 gimple stmt = SSA_NAME_DEF_STMT (vdef);
498 struct type_change_info *tci = (struct type_change_info *) data;
499
500 if (stmt_may_be_vtbl_ptr_store (stmt))
501 {
502 tree type;
503 type = extr_type_from_vtbl_ptr_store (stmt, tci);
504 if (tci->type_maybe_changed
505 && type != tci->known_current_type)
506 tci->multiple_types_encountered = true;
507 tci->known_current_type = type;
508 tci->type_maybe_changed = true;
509 return true;
510 }
511 else
512 return false;
513 }
514
515
516
517 /* Like detect_type_change but with extra argument COMP_TYPE which will become
518 the component type part of the new JFUNC if a dynamic type change is
519 detected and the new base type is identified. */
520
521 static bool
522 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
523 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
524 {
525 struct type_change_info tci;
526 ao_ref ao;
527
528 gcc_checking_assert (DECL_P (arg)
529 || TREE_CODE (arg) == MEM_REF
530 || handled_component_p (arg));
531 /* Const calls cannot call virtual methods through VMT and so type changes do
532 not matter. */
533 if (!flag_devirtualize || !gimple_vuse (call))
534 return false;
535
536 ao_ref_init (&ao, arg);
537 ao.base = base;
538 ao.offset = offset;
539 ao.size = POINTER_SIZE;
540 ao.max_size = ao.size;
541
542 tci.offset = offset;
543 tci.object = get_base_address (arg);
544 tci.known_current_type = NULL_TREE;
545 tci.type_maybe_changed = false;
546 tci.multiple_types_encountered = false;
547
548 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
549 &tci, NULL);
550 if (!tci.type_maybe_changed)
551 return false;
552
553 if (!tci.known_current_type
554 || tci.multiple_types_encountered
555 || offset != 0)
556 jfunc->type = IPA_JF_UNKNOWN;
557 else
558 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
559
560 return true;
561 }
562
563 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
564 looking for assignments to its virtual table pointer. If it has, return true
565 and fill in the jump function JFUNC with relevant type information or set it
566 to unknown. ARG is the object itself (not a pointer to it, unless
567 dereferenced). BASE is the base of the memory access as returned by
568 get_ref_base_and_extent, as is the offset. */
569
570 static bool
571 detect_type_change (tree arg, tree base, gimple call,
572 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
573 {
574 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
575 }
576
577 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
578 SSA name (its dereference will become the base and the offset is assumed to
579 be zero). */
580
581 static bool
582 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
583 {
584 tree comp_type;
585
586 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
587 if (!flag_devirtualize
588 || !POINTER_TYPE_P (TREE_TYPE (arg))
589 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
590 return false;
591
592 comp_type = TREE_TYPE (TREE_TYPE (arg));
593 arg = build2 (MEM_REF, ptr_type_node, arg,
594 build_int_cst (ptr_type_node, 0));
595
596 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
597 }
598
599 /* Callback of walk_aliased_vdefs. Sets the boolean variable pointed to by
600 DATA to true, thereby flagging that it has been invoked. */
601
602 static bool
603 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
604 void *data)
605 {
606 bool *b = (bool *) data;
607 *b = true;
608 return true;
609 }
610
611 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
612 a value known not to be modified in this function before reaching the
613 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
614 information about the parameter. */
615
616 static bool
617 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
618 gimple stmt, tree parm_load)
619 {
620 bool modified = false;
621 bitmap *visited_stmts;
622 ao_ref refd;
623
624 if (parm_ainfo && parm_ainfo->parm_modified)
625 return false;
626
627 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
628 ao_ref_init (&refd, parm_load);
629 /* We can cache visited statements only when parm_ainfo is available and when
630 we are looking at a naked load of the whole parameter. */
631 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
632 visited_stmts = NULL;
633 else
634 visited_stmts = &parm_ainfo->parm_visited_statements;
635 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
636 visited_stmts);
637 if (parm_ainfo && modified)
638 parm_ainfo->parm_modified = true;
639 return !modified;
640 }
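
/* For example, in a hypothetical function (a sketch, assuming A is
addressable and therefore lives in memory):

void foo (int a)
{
use (a); <- a load of A here is preserved, no vdef precedes it
a = 5;
use (a); <- here the walk finds the store and A is not preserved
}

Once a modification is found, the negative answer is cached in
PARM_AINFO->parm_modified so that subsequent queries bail out early. */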
641
642 /* If STMT is an assignment that loads a value from a parameter declaration,
643 return the index of the parameter in ipa_node_params which has not been
644 modified. Otherwise return -1. */
645
646 static int
647 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
648 struct param_analysis_info *parms_ainfo,
649 gimple stmt)
650 {
651 int index;
652 tree op1;
653
654 if (!gimple_assign_single_p (stmt))
655 return -1;
656
657 op1 = gimple_assign_rhs1 (stmt);
658 if (TREE_CODE (op1) != PARM_DECL)
659 return -1;
660
661 index = ipa_get_param_decl_index_1 (descriptors, op1);
662 if (index < 0
663 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
664 : NULL, stmt, op1))
665 return -1;
666
667 return index;
668 }
669
670 /* Return true if memory reference REF loads data that are known to be
671 unmodified in this function before reaching statement STMT. PARM_AINFO, if
672 non-NULL, is a pointer to a structure containing temporary information about
673 PARM. */
674
675 static bool
676 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
677 gimple stmt, tree ref)
678 {
679 bool modified = false;
680 ao_ref refd;
681
682 gcc_checking_assert (gimple_vuse (stmt));
683 if (parm_ainfo && parm_ainfo->ref_modified)
684 return false;
685
686 ao_ref_init (&refd, ref);
687 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
688 NULL);
689 if (parm_ainfo && modified)
690 parm_ainfo->ref_modified = true;
691 return !modified;
692 }
693
694 /* Return true if the data pointed to by PARM is known to be unmodified in this
695 function before reaching call statement CALL into which it is passed.
696 PARM_AINFO is a pointer to a structure containing temporary information
697 about PARM. */
698
699 static bool
700 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
701 gimple call, tree parm)
702 {
703 bool modified = false;
704 ao_ref refd;
705
706 /* It's unnecessary to calculate anything about memory contents for a const
707 function because it is not going to use it. But do not cache the result
708 either. Also, no such calculations for non-pointers. */
709 if (!gimple_vuse (call)
710 || !POINTER_TYPE_P (TREE_TYPE (parm)))
711 return false;
712
713 if (parm_ainfo->pt_modified)
714 return false;
715
716 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
717 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
718 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
719 if (modified)
720 parm_ainfo->pt_modified = true;
721 return !modified;
722 }
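
/* For example (a hypothetical sketch):

void foo (struct S *p)
{
g (p); <- memory pointed to by P not yet modified: true
p->x = 1;
h (p); <- the store above is found by the walk: false
}

Unlike parm_preserved_before_stmt_p, which tracks the value of the parameter
itself, this predicate tracks the memory the pointer points to. */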
723
724 /* Return true if we can prove that OP is a memory reference loading unmodified
725 data from an aggregate passed as a parameter and if the aggregate is passed
726 by reference, that the alias type of the load corresponds to the type of the
727 formal parameter (so that we can rely on this type for TBAA in callers).
728 INFO and PARMS_AINFO describe parameters of the current function (but the
729 latter can be NULL), STMT is the load statement. If function returns true,
730 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
731 within the aggregate and whether it is a load from a value passed by
732 reference respectively. */
733
734 static bool
735 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
736 struct param_analysis_info *parms_ainfo, gimple stmt,
737 tree op, int *index_p, HOST_WIDE_INT *offset_p,
738 bool *by_ref_p)
739 {
740 int index;
741 HOST_WIDE_INT size, max_size;
742 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
743
744 if (max_size == -1 || max_size != size || *offset_p < 0)
745 return false;
746
747 if (DECL_P (base))
748 {
749 int index = ipa_get_param_decl_index_1 (descriptors, base);
750 if (index >= 0
751 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
752 : NULL, stmt, op))
753 {
754 *index_p = index;
755 *by_ref_p = false;
756 return true;
757 }
758 return false;
759 }
760
761 if (TREE_CODE (base) != MEM_REF
762 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
763 || !integer_zerop (TREE_OPERAND (base, 1)))
764 return false;
765
766 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
767 {
768 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
769 index = ipa_get_param_decl_index_1 (descriptors, parm);
770 }
771 else
772 {
773 /* This branch catches situations where a pointer parameter is not a
774 gimple register, for example:
775
776 void hip7(S*) (struct S * p)
777 {
778 void (*<T2e4>) (struct S *) D.1867;
779 struct S * p.1;
780
781 <bb 2>:
782 p.1_1 = p;
783 D.1867_2 = p.1_1->f;
784 D.1867_2 ();
785 gdp = &p;
786 */
787
788 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
789 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
790 }
791
792 if (index >= 0
793 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
794 stmt, op))
795 {
796 *index_p = index;
797 *by_ref_p = true;
798 return true;
799 }
800 return false;
801 }
802
803 /* Just like the previous function, just without the param_analysis_info
804 pointer, for users outside of this file. */
805
806 bool
807 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
808 tree op, int *index_p, HOST_WIDE_INT *offset_p,
809 bool *by_ref_p)
810 {
811 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
812 offset_p, by_ref_p);
813 }
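
/* For example, in a hypothetical callee (a sketch; bit offsets shown,
assuming 32-bit ints):

struct S { int x; int y; };

int foo (struct S s, struct S *p)
{
return s.y <- *INDEX_P = 0, *OFFSET_P = 32, *BY_REF_P = false
+ p->x; <- *INDEX_P = 1, *OFFSET_P = 0, *BY_REF_P = true
}

in both cases provided the corresponding memory can be proved unmodified
between the start of the function and the load. */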
814
815 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
816 of an assignment statement STMT, try to determine whether we are actually
817 handling any of the following cases and construct an appropriate jump
818 function into JFUNC if so:
819
820 1) The passed value is loaded from a formal parameter which is not a gimple
821 register (most probably because it is addressable; the value itself has to be
822 scalar) and we can guarantee the value has not changed. This case can
823 therefore be described by a simple pass-through jump function. For example:
824
825 foo (int a)
826 {
827 int a.0;
828
829 a.0_2 = a;
830 bar (a.0_2);
831
832 2) The passed value can be described by a simple arithmetic pass-through
833 jump function. E.g.
834
835 foo (int a)
836 {
837 int D.2064;
838
839 D.2064_4 = a.1(D) + 4;
840 bar (D.2064_4);
841
842 This case can also occur in combination with the previous one, e.g.:
843
844 foo (int a, int z)
845 {
846 int a.0;
847 int D.2064;
848
849 a.0_3 = a;
850 D.2064_4 = a.0_3 + 4;
851 foo (D.2064_4);
852
853 3) The passed value is an address of an object within another one (which
854 is also passed by reference). Such situations are described by an ancestor
855 jump function and describe situations such as:
856
857 B::foo() (struct B * const this)
858 {
859 struct A * D.1845;
860
861 D.1845_2 = &this_1(D)->D.1748;
862 A::bar (D.1845_2);
863
864 INFO is the structure describing individual parameters; it is used across
865 different stages of IPA optimizations. PARMS_AINFO contains the information that is
866 only needed for intraprocedural analysis. */
867
868 static void
869 compute_complex_assign_jump_func (struct ipa_node_params *info,
870 struct param_analysis_info *parms_ainfo,
871 struct ipa_jump_func *jfunc,
872 gimple call, gimple stmt, tree name)
873 {
874 HOST_WIDE_INT offset, size, max_size;
875 tree op1, tc_ssa, base, ssa;
876 int index;
877
878 op1 = gimple_assign_rhs1 (stmt);
879
880 if (TREE_CODE (op1) == SSA_NAME)
881 {
882 if (SSA_NAME_IS_DEFAULT_DEF (op1))
883 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
884 else
885 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
886 SSA_NAME_DEF_STMT (op1));
887 tc_ssa = op1;
888 }
889 else
890 {
891 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
892 tc_ssa = gimple_assign_lhs (stmt);
893 }
894
895 if (index >= 0)
896 {
897 tree op2 = gimple_assign_rhs2 (stmt);
898
899 if (op2)
900 {
901 if (!is_gimple_ip_invariant (op2)
902 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
903 && !useless_type_conversion_p (TREE_TYPE (name),
904 TREE_TYPE (op1))))
905 return;
906
907 ipa_set_jf_arith_pass_through (jfunc, index, op2,
908 gimple_assign_rhs_code (stmt));
909 }
910 else if (gimple_assign_single_p (stmt)
911 && !detect_type_change_ssa (tc_ssa, call, jfunc))
912 {
913 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
914 call, tc_ssa);
915 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
916 }
917 return;
918 }
919
920 if (TREE_CODE (op1) != ADDR_EXPR)
921 return;
922 op1 = TREE_OPERAND (op1, 0);
923 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
924 return;
925 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
926 if (TREE_CODE (base) != MEM_REF
927 /* If this is a varying address, punt. */
928 || max_size == -1
929 || max_size != size)
930 return;
931 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
932 ssa = TREE_OPERAND (base, 0);
933 if (TREE_CODE (ssa) != SSA_NAME
934 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
935 || offset < 0)
936 return;
937
938 /* Dynamic types are changed only in constructors and destructors. */
939 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
940 if (index >= 0
941 && !detect_type_change (op1, base, call, jfunc, offset))
942 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
943 parm_ref_data_pass_through_p (&parms_ainfo[index],
944 call, ssa));
945 }
946
947 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
948 it looks like:
949
950 iftmp.1_3 = &obj_2(D)->D.1762;
951
952 The base of the MEM_REF must be a default definition SSA NAME of a
953 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
954 whole MEM_REF expression is returned and the offset calculated from any
955 handled components and the MEM_REF itself is stored into *OFFSET. The whole
956 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
957
958 static tree
959 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
960 {
961 HOST_WIDE_INT size, max_size;
962 tree expr, parm, obj;
963
964 if (!gimple_assign_single_p (assign))
965 return NULL_TREE;
966 expr = gimple_assign_rhs1 (assign);
967
968 if (TREE_CODE (expr) != ADDR_EXPR)
969 return NULL_TREE;
970 expr = TREE_OPERAND (expr, 0);
971 obj = expr;
972 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
973
974 if (TREE_CODE (expr) != MEM_REF
975 /* If this is a varying address, punt. */
976 || max_size == -1
977 || max_size != size
978 || *offset < 0)
979 return NULL_TREE;
980 parm = TREE_OPERAND (expr, 0);
981 if (TREE_CODE (parm) != SSA_NAME
982 || !SSA_NAME_IS_DEFAULT_DEF (parm)
983 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
984 return NULL_TREE;
985
986 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
987 *obj_p = obj;
988 return expr;
989 }
990
991
992 /* Given that an actual argument is an SSA_NAME that is a result of a phi
993 statement PHI, try to find out whether NAME is in fact a
994 multiple-inheritance typecast from a descendant into an ancestor of a formal
995 parameter and thus can be described by an ancestor jump function and if so,
996 write the appropriate function into JFUNC.
997
998 Essentially we want to match the following pattern:
999
1000 if (obj_2(D) != 0B)
1001 goto <bb 3>;
1002 else
1003 goto <bb 4>;
1004
1005 <bb 3>:
1006 iftmp.1_3 = &obj_2(D)->D.1762;
1007
1008 <bb 4>:
1009 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1010 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1011 return D.1879_6; */
1012
1013 static void
1014 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1015 struct param_analysis_info *parms_ainfo,
1016 struct ipa_jump_func *jfunc,
1017 gimple call, gimple phi)
1018 {
1019 HOST_WIDE_INT offset;
1020 gimple assign, cond;
1021 basic_block phi_bb, assign_bb, cond_bb;
1022 tree tmp, parm, expr, obj;
1023 int index, i;
1024
1025 if (gimple_phi_num_args (phi) != 2)
1026 return;
1027
1028 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1029 tmp = PHI_ARG_DEF (phi, 0);
1030 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1031 tmp = PHI_ARG_DEF (phi, 1);
1032 else
1033 return;
1034 if (TREE_CODE (tmp) != SSA_NAME
1035 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1036 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1037 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1038 return;
1039
1040 assign = SSA_NAME_DEF_STMT (tmp);
1041 assign_bb = gimple_bb (assign);
1042 if (!single_pred_p (assign_bb))
1043 return;
1044 expr = get_ancestor_addr_info (assign, &obj, &offset);
1045 if (!expr)
1046 return;
1047 parm = TREE_OPERAND (expr, 0);
1048 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1049 gcc_assert (index >= 0);
1050
1051 cond_bb = single_pred (assign_bb);
1052 cond = last_stmt (cond_bb);
1053 if (!cond
1054 || gimple_code (cond) != GIMPLE_COND
1055 || gimple_cond_code (cond) != NE_EXPR
1056 || gimple_cond_lhs (cond) != parm
1057 || !integer_zerop (gimple_cond_rhs (cond)))
1058 return;
1059
1060 phi_bb = gimple_bb (phi);
1061 for (i = 0; i < 2; i++)
1062 {
1063 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1064 if (pred != assign_bb && pred != cond_bb)
1065 return;
1066 }
1067
1068 if (!detect_type_change (obj, expr, call, jfunc, offset))
1069 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1070 parm_ref_data_pass_through_p (&parms_ainfo[index],
1071 call, parm));
1072 }
1073
1074 /* Given OP which is passed as an actual argument to a called function,
1075 determine if it is possible to construct a KNOWN_TYPE jump function for it
1076 and if so, create one and store it to JFUNC. */
1077
1078 static void
1079 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1080 gimple call)
1081 {
1082 HOST_WIDE_INT offset, size, max_size;
1083 tree base;
1084
1085 if (!flag_devirtualize
1086 || TREE_CODE (op) != ADDR_EXPR
1087 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1088 return;
1089
1090 op = TREE_OPERAND (op, 0);
1091 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1092 if (!DECL_P (base)
1093 || max_size == -1
1094 || max_size != size
1095 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1096 || is_global_var (base))
1097 return;
1098
1099 if (!TYPE_BINFO (TREE_TYPE (base))
1100 || detect_type_change (op, base, call, jfunc, offset))
1101 return;
1102
1103 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1104 }
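
/* For example (a hypothetical sketch; the type must be polymorphic so that
TYPE_BINFO is present):

struct A { virtual void f (); };

void caller (void)
{
struct A a;
take (&a); <- KNOWN_TYPE: base type A, offset 0, component type A
}

Global objects do not pass the is_global_var check above and therefore never
yield a KNOWN_TYPE jump function here. */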
1105
1106 /* Inspect the given TYPE and return true iff it has the same structure (the
1107 same number of fields of the same types) as a C++ member pointer. If
1108 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1109 corresponding fields there. */
1110
1111 static bool
1112 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1113 {
1114 tree fld;
1115
1116 if (TREE_CODE (type) != RECORD_TYPE)
1117 return false;
1118
1119 fld = TYPE_FIELDS (type);
1120 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1121 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1122 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1123 return false;
1124
1125 if (method_ptr)
1126 *method_ptr = fld;
1127
1128 fld = DECL_CHAIN (fld);
1129 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1130 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1131 return false;
1132 if (delta)
1133 *delta = fld;
1134
1135 if (DECL_CHAIN (fld))
1136 return false;
1137
1138 return true;
1139 }
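
/* For reference, a C++ pointer to member function is typically lowered to a
record of the shape checked above; a hypothetical sketch:

struct ptrmemfunc
{
void (*__pfn) (); <- in reality a pointer to METHOD_TYPE
long __delta; <- integral adjustment applied to THIS
};

The field names correspond to the f$__pfn and f$__delta temporaries seen in
the gimple dumps further below. */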
1140
1141 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1142 return the rhs of its defining statement. Otherwise return RHS as it
1143 is. */
1144
1145 static inline tree
1146 get_ssa_def_if_simple_copy (tree rhs)
1147 {
1148 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1149 {
1150 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1151
1152 if (gimple_assign_single_p (def_stmt))
1153 rhs = gimple_assign_rhs1 (def_stmt);
1154 else
1155 break;
1156 }
1157 return rhs;
1158 }
1159
1160 /* Simple linked list, describing known contents of an aggregate before a
1161 call. */
1162
1163 struct ipa_known_agg_contents_list
1164 {
1165 /* Offset and size of the described part of the aggregate. */
1166 HOST_WIDE_INT offset, size;
1167 /* Known constant value or NULL if the contents are known to be unknown. */
1168 tree constant;
1169 /* Pointer to the next structure in the list. */
1170 struct ipa_known_agg_contents_list *next;
1171 };
1172
1173 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1174 in ARG is filled in with constant values. ARG can either be an aggregate
1175 expression or a pointer to an aggregate. JFUNC is the jump function into
1176 which the constants are subsequently stored. */
1177
1178 static void
1179 determine_known_aggregate_parts (gimple call, tree arg,
1180 struct ipa_jump_func *jfunc)
1181 {
1182 struct ipa_known_agg_contents_list *list = NULL;
1183 int item_count = 0, const_count = 0;
1184 HOST_WIDE_INT arg_offset, arg_size;
1185 gimple_stmt_iterator gsi;
1186 tree arg_base;
1187 bool check_ref, by_ref;
1188 ao_ref r;
1189
1190 /* The function operates in three stages. First, we prepare check_ref, r,
1191 arg_base and arg_offset based on what is actually passed as an actual
1192 argument. */
1193
1194 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1195 {
1196 by_ref = true;
1197 if (TREE_CODE (arg) == SSA_NAME)
1198 {
1199 tree type_size;
1200 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1201 return;
1202 check_ref = true;
1203 arg_base = arg;
1204 arg_offset = 0;
1205 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1206 arg_size = tree_low_cst (type_size, 1);
1207 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1208 }
1209 else if (TREE_CODE (arg) == ADDR_EXPR)
1210 {
1211 HOST_WIDE_INT arg_max_size;
1212
1213 arg = TREE_OPERAND (arg, 0);
1214 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1215 &arg_max_size);
1216 if (arg_max_size == -1
1217 || arg_max_size != arg_size
1218 || arg_offset < 0)
1219 return;
1220 if (DECL_P (arg_base))
1221 {
1222 tree size;
1223 check_ref = false;
1224 size = build_int_cst (integer_type_node, arg_size);
1225 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1226 }
1227 else
1228 return;
1229 }
1230 else
1231 return;
1232 }
1233 else
1234 {
1235 HOST_WIDE_INT arg_max_size;
1236
1237 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1238
1239 by_ref = false;
1240 check_ref = false;
1241 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1242 &arg_max_size);
1243 if (arg_max_size == -1
1244 || arg_max_size != arg_size
1245 || arg_offset < 0)
1246 return;
1247
1248 ao_ref_init (&r, arg);
1249 }
1250
1251 /* Second stage walks back the BB, looks at individual statements and as long
1252 as it is confident of how the statements affect contents of the
1253 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1254 structures describing it. */
1255 gsi = gsi_for_stmt (call);
1256 gsi_prev (&gsi);
1257 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1258 {
1259 struct ipa_known_agg_contents_list *n, **p;
1260 gimple stmt = gsi_stmt (gsi);
1261 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1262 tree lhs, rhs, lhs_base;
1263 bool partial_overlap;
1264
1265 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1266 continue;
1267 if (!gimple_assign_single_p (stmt))
1268 break;
1269
1270 lhs = gimple_assign_lhs (stmt);
1271 rhs = gimple_assign_rhs1 (stmt);
1272 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
1273 break;
1274
1275 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1276 &lhs_max_size);
1277 if (lhs_max_size == -1
1278 || lhs_max_size != lhs_size
1279 || (lhs_offset < arg_offset
1280 && lhs_offset + lhs_size > arg_offset)
1281 || (lhs_offset < arg_offset + arg_size
1282 && lhs_offset + lhs_size > arg_offset + arg_size))
1283 break;
1284
1285 if (check_ref)
1286 {
1287 if (TREE_CODE (lhs_base) != MEM_REF
1288 || TREE_OPERAND (lhs_base, 0) != arg_base
1289 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1290 break;
1291 }
1292 else if (lhs_base != arg_base)
1293 {
1294 if (DECL_P (lhs_base))
1295 continue;
1296 else
1297 break;
1298 }
1299
1300 if (lhs_offset + lhs_size < arg_offset
1301 || lhs_offset >= (arg_offset + arg_size))
1302 continue;
1303
1304 partial_overlap = false;
1305 p = &list;
1306 while (*p && (*p)->offset < lhs_offset)
1307 {
1308 if ((*p)->offset + (*p)->size > lhs_offset)
1309 {
1310 partial_overlap = true;
1311 break;
1312 }
1313 p = &(*p)->next;
1314 }
1315 if (partial_overlap)
1316 break;
1317 if (*p && (*p)->offset < lhs_offset + lhs_size)
1318 {
1319 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1320 /* We already know this value is subsequently overwritten with
1321 something else. */
1322 continue;
1323 else
1324 /* Otherwise this is a partial overlap which we cannot
1325 represent. */
1326 break;
1327 }
1328
1329 rhs = get_ssa_def_if_simple_copy (rhs);
1330 n = XALLOCA (struct ipa_known_agg_contents_list);
1331 n->size = lhs_size;
1332 n->offset = lhs_offset;
1333 if (is_gimple_ip_invariant (rhs))
1334 {
1335 n->constant = rhs;
1336 const_count++;
1337 }
1338 else
1339 n->constant = NULL_TREE;
1340 n->next = *p;
1341 *p = n;
1342
1343 item_count++;
1344 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1345 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1346 break;
1347 }
1348
1349 /* Third stage just goes over the list and creates an appropriate vector of
1350 ipa_agg_jf_item structures out of it, of course only if there are
1351 any known constants to begin with. */
1352
1353 if (const_count)
1354 {
1355 jfunc->agg.by_ref = by_ref;
1356 vec_alloc (jfunc->agg.items, const_count);
1357 while (list)
1358 {
1359 if (list->constant)
1360 {
1361 struct ipa_agg_jf_item item;
1362 item.offset = list->offset - arg_offset;
1363 item.value = unshare_expr_without_location (list->constant);
1364 jfunc->agg.items->quick_push (item);
1365 }
1366 list = list->next;
1367 }
1368 }
1369 }
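
/* For example, for a hypothetical call site (a sketch):

struct S s;
s.a = 1;
s.b = get ();
s.c = 3;
use (&s);

the backward walk above records the constant stores to s.a and s.c, an
unknown (NULL_TREE) entry for s.b, and the third stage then emits a jump
function with agg.by_ref set and two constant items with offsets relative to
the start of S. */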
1370
1371 /* Compute jump functions for all arguments of callsite CS and insert the
1372 information in the jump_functions array in the ipa_edge_args corresponding
1373 to this callsite. */
1374
1375 static void
1376 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1377 struct cgraph_edge *cs)
1378 {
1379 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1380 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1381 gimple call = cs->call_stmt;
1382 int n, arg_num = gimple_call_num_args (call);
1383
1384 if (arg_num == 0 || args->jump_functions)
1385 return;
1386 vec_safe_grow_cleared (args->jump_functions, arg_num);
1387
1388 for (n = 0; n < arg_num; n++)
1389 {
1390 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1391 tree arg = gimple_call_arg (call, n);
1392
1393 if (is_gimple_ip_invariant (arg))
1394 ipa_set_jf_constant (jfunc, arg);
1395 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1396 && TREE_CODE (arg) == PARM_DECL)
1397 {
1398 int index = ipa_get_param_decl_index (info, arg);
1399
1400 gcc_assert (index >= 0);
1401 /* Aggregate passed by value, check for pass-through, otherwise we
1402 will attempt to fill in aggregate contents later in this
1403 loop. */
1404 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1405 {
1406 ipa_set_jf_simple_pass_through (jfunc, index, false);
1407 continue;
1408 }
1409 }
1410 else if (TREE_CODE (arg) == SSA_NAME)
1411 {
1412 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1413 {
1414 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1415 if (index >= 0
1416 && !detect_type_change_ssa (arg, call, jfunc))
1417 {
1418 bool agg_p;
1419 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1420 call, arg);
1421 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1422 }
1423 }
1424 else
1425 {
1426 gimple stmt = SSA_NAME_DEF_STMT (arg);
1427 if (is_gimple_assign (stmt))
1428 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1429 call, stmt, arg);
1430 else if (gimple_code (stmt) == GIMPLE_PHI)
1431 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1432 call, stmt);
1433 }
1434 }
1435 else
1436 compute_known_type_jump_func (arg, jfunc, call);
1437
1438 if ((jfunc->type != IPA_JF_PASS_THROUGH
1439 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1440 && (jfunc->type != IPA_JF_ANCESTOR
1441 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1442 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1443 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1444 determine_known_aggregate_parts (call, arg, jfunc);
1445 }
1446 }
1447
1448 /* Compute jump functions for all edges - both direct and indirect - outgoing
1449 from NODE. Also count the actual arguments in the process. */
1450
1451 static void
1452 ipa_compute_jump_functions (struct cgraph_node *node,
1453 struct param_analysis_info *parms_ainfo)
1454 {
1455 struct cgraph_edge *cs;
1456
1457 for (cs = node->callees; cs; cs = cs->next_callee)
1458 {
1459 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1460 NULL);
1461 /* We do not need to bother analyzing calls to unknown
1462 functions unless they may become known during lto/whopr. */
1463 if (!callee->analyzed && !flag_lto)
1464 continue;
1465 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1466 }
1467
1468 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1469 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1470 }
1471
1472 /* If STMT looks like a statement loading a value from a member pointer formal
1473 parameter, return that parameter and store the offset of the field to
1474 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1475 might be clobbered). If USE_DELTA, then we look for a use of the delta
1476 field rather than the pfn. */
1477
1478 static tree
1479 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1480 HOST_WIDE_INT *offset_p)
1481 {
1482 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1483
1484 if (!gimple_assign_single_p (stmt))
1485 return NULL_TREE;
1486
1487 rhs = gimple_assign_rhs1 (stmt);
1488 if (TREE_CODE (rhs) == COMPONENT_REF)
1489 {
1490 ref_field = TREE_OPERAND (rhs, 1);
1491 rhs = TREE_OPERAND (rhs, 0);
1492 }
1493 else
1494 ref_field = NULL_TREE;
1495 if (TREE_CODE (rhs) != MEM_REF)
1496 return NULL_TREE;
1497 rec = TREE_OPERAND (rhs, 0);
1498 if (TREE_CODE (rec) != ADDR_EXPR)
1499 return NULL_TREE;
1500 rec = TREE_OPERAND (rec, 0);
1501 if (TREE_CODE (rec) != PARM_DECL
1502 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1503 return NULL_TREE;
1504 ref_offset = TREE_OPERAND (rhs, 1);
1505
1506 if (use_delta)
1507 fld = delta_field;
1508 else
1509 fld = ptr_field;
1510 if (offset_p)
1511 *offset_p = int_bit_position (fld);
1512
1513 if (ref_field)
1514 {
1515 if (integer_nonzerop (ref_offset))
1516 return NULL_TREE;
1517 return ref_field == fld ? rec : NULL_TREE;
1518 }
1519 else
1520 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1521 : NULL_TREE;
1522 }
1523
1524 /* Returns true iff T is an SSA_NAME defined by a statement. */
1525
1526 static bool
1527 ipa_is_ssa_with_stmt_def (tree t)
1528 {
1529 if (TREE_CODE (t) == SSA_NAME
1530 && !SSA_NAME_IS_DEFAULT_DEF (t))
1531 return true;
1532 else
1533 return false;
1534 }
1535
1536 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1537 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1538 indirect call graph edge. */
1539
1540 static struct cgraph_edge *
1541 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1542 {
1543 struct cgraph_edge *cs;
1544
1545 cs = cgraph_edge (node, stmt);
1546 cs->indirect_info->param_index = param_index;
1547 cs->indirect_info->offset = 0;
1548 cs->indirect_info->polymorphic = 0;
1549 cs->indirect_info->agg_contents = 0;
1550 return cs;
1551 }
1552
1553 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1554 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1555 intermediate information about each formal parameter. Currently it checks
1556 whether the call calls a pointer that is a formal parameter and if so, the
1557 parameter is marked with the called flag and an indirect call graph edge
1558 describing the call is created. This is very simple for ordinary pointers
1559 represented in SSA but not-so-nice when it comes to member pointers. The
1560 ugly part of this function does nothing more than trying to match the
1561 pattern of such a call. An example of such a pattern is the gimple dump
1562 below, the call is on the last line:
1563
1564 <bb 2>:
1565 f$__delta_5 = f.__delta;
1566 f$__pfn_24 = f.__pfn;
1567
1568 or
1569 <bb 2>:
1570 f$__delta_5 = MEM[(struct *)&f];
1571 f$__pfn_24 = MEM[(struct *)&f + 4B];
1572
1573 and a few lines below:
1574
1575 <bb 5>
1576 D.2496_3 = (int) f$__pfn_24;
1577 D.2497_4 = D.2496_3 & 1;
1578 if (D.2497_4 != 0)
1579 goto <bb 3>;
1580 else
1581 goto <bb 4>;
1582
1583 <bb 6>:
1584 D.2500_7 = (unsigned int) f$__delta_5;
1585 D.2501_8 = &S + D.2500_7;
1586 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1587 D.2503_10 = *D.2502_9;
1588 D.2504_12 = f$__pfn_24 + -1;
1589 D.2505_13 = (unsigned int) D.2504_12;
1590 D.2506_14 = D.2503_10 + D.2505_13;
1591 D.2507_15 = *D.2506_14;
1592 iftmp.11_16 = (String:: *) D.2507_15;
1593
1594 <bb 7>:
1595 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1596 D.2500_19 = (unsigned int) f$__delta_5;
1597 D.2508_20 = &S + D.2500_19;
1598 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1599
1600 Such patterns are results of simple calls to a member pointer:
1601
1602 int doprinting (int (MyString::* f)(int) const)
1603 {
1604 MyString S ("somestring");
1605
1606 return (S.*f)(4);
1607 }
1608
1609 Moreover, the function also looks for called pointers loaded from aggregates
1610 passed by value or reference. */
1611
1612 static void
1613 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1614 struct ipa_node_params *info,
1615 struct param_analysis_info *parms_ainfo,
1616 gimple call, tree target)
1617 {
1618 gimple def;
1619 tree n1, n2;
1620 gimple d1, d2;
1621 tree rec, rec2, cond;
1622 gimple branch;
1623 int index;
1624 basic_block bb, virt_bb, join;
1625 HOST_WIDE_INT offset;
1626 bool by_ref;
1627
1628 if (SSA_NAME_IS_DEFAULT_DEF (target))
1629 {
1630 tree var = SSA_NAME_VAR (target);
1631 index = ipa_get_param_decl_index (info, var);
1632 if (index >= 0)
1633 ipa_note_param_call (node, index, call);
1634 return;
1635 }
1636
1637 def = SSA_NAME_DEF_STMT (target);
1638 if (gimple_assign_single_p (def)
1639 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1640 gimple_assign_rhs1 (def), &index, &offset,
1641 &by_ref))
1642 {
1643 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1644 cs->indirect_info->offset = offset;
1645 cs->indirect_info->agg_contents = 1;
1646 cs->indirect_info->by_ref = by_ref;
1647 return;
1648 }
1649
1650 /* Now we need to try to match the complex pattern of calling a member
1651 pointer. */
1652 if (gimple_code (def) != GIMPLE_PHI
1653 || gimple_phi_num_args (def) != 2
1654 || !POINTER_TYPE_P (TREE_TYPE (target))
1655 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1656 return;
1657
1658 /* First, we need to check whether one of these is a load from a member
1659 pointer that is a parameter to this function. */
1660 n1 = PHI_ARG_DEF (def, 0);
1661 n2 = PHI_ARG_DEF (def, 1);
1662 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1663 return;
1664 d1 = SSA_NAME_DEF_STMT (n1);
1665 d2 = SSA_NAME_DEF_STMT (n2);
1666
1667 join = gimple_bb (def);
1668 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1669 {
1670 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1671 return;
1672
1673 bb = EDGE_PRED (join, 0)->src;
1674 virt_bb = gimple_bb (d2);
1675 }
1676 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1677 {
1678 bb = EDGE_PRED (join, 1)->src;
1679 virt_bb = gimple_bb (d1);
1680 }
1681 else
1682 return;
1683
1684 /* Second, we need to check that the basic blocks are laid out in the way
1685 corresponding to the pattern. */
1686
1687 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1688 || single_pred (virt_bb) != bb
1689 || single_succ (virt_bb) != join)
1690 return;
1691
1692 /* Third, let's see that the branching is done depending on the least
1693 significant bit of the pfn. */
1694
1695 branch = last_stmt (bb);
1696 if (!branch || gimple_code (branch) != GIMPLE_COND)
1697 return;
1698
1699 if ((gimple_cond_code (branch) != NE_EXPR
1700 && gimple_cond_code (branch) != EQ_EXPR)
1701 || !integer_zerop (gimple_cond_rhs (branch)))
1702 return;
1703
1704 cond = gimple_cond_lhs (branch);
1705 if (!ipa_is_ssa_with_stmt_def (cond))
1706 return;
1707
1708 def = SSA_NAME_DEF_STMT (cond);
1709 if (!is_gimple_assign (def)
1710 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1711 || !integer_onep (gimple_assign_rhs2 (def)))
1712 return;
1713
1714 cond = gimple_assign_rhs1 (def);
1715 if (!ipa_is_ssa_with_stmt_def (cond))
1716 return;
1717
1718 def = SSA_NAME_DEF_STMT (cond);
1719
1720 if (is_gimple_assign (def)
1721 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1722 {
1723 cond = gimple_assign_rhs1 (def);
1724 if (!ipa_is_ssa_with_stmt_def (cond))
1725 return;
1726 def = SSA_NAME_DEF_STMT (cond);
1727 }
1728
1729 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1730 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1731 == ptrmemfunc_vbit_in_delta),
1732 NULL);
1733 if (rec != rec2)
1734 return;
1735
1736 index = ipa_get_param_decl_index (info, rec);
1737 if (index >= 0
1738 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1739 {
1740 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1741 cs->indirect_info->offset = offset;
1742 cs->indirect_info->agg_contents = 1;
1743 }
1744
1745 return;
1746 }
1747
1748 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1749 object referenced in the expression is a formal parameter of the caller
1750 (described by INFO), create a call note for the statement. */
1751
1752 static void
1753 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1754 struct ipa_node_params *info, gimple call,
1755 tree target)
1756 {
1757 struct cgraph_edge *cs;
1758 struct cgraph_indirect_call_info *ii;
1759 struct ipa_jump_func jfunc;
1760 tree obj = OBJ_TYPE_REF_OBJECT (target);
1761 int index;
1762 HOST_WIDE_INT anc_offset;
1763
1764 if (!flag_devirtualize)
1765 return;
1766
1767 if (TREE_CODE (obj) != SSA_NAME)
1768 return;
1769
1770 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1771 {
1772 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1773 return;
1774
1775 anc_offset = 0;
1776 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1777 gcc_assert (index >= 0);
1778 if (detect_type_change_ssa (obj, call, &jfunc))
1779 return;
1780 }
1781 else
1782 {
1783 gimple stmt = SSA_NAME_DEF_STMT (obj);
1784 tree expr;
1785
1786 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1787 if (!expr)
1788 return;
1789 index = ipa_get_param_decl_index (info,
1790 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1791 gcc_assert (index >= 0);
1792 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1793 return;
1794 }
1795
1796 cs = ipa_note_param_call (node, index, call);
1797 ii = cs->indirect_info;
1798 ii->offset = anc_offset;
1799 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1800 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1801 ii->polymorphic = 1;
1802 }
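
/* For example, a hypothetical virtual call through a formal parameter
(a sketch, assuming F is the first entry of A's virtual table):

void caller (struct A *obj)
{
obj->f (); <- becomes an OBJ_TYPE_REF call with token 0
}

produces an indirect edge whose indirect_info has param_index set to the
index of OBJ, offset 0, polymorphic set and otr_token 0. */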
1803
1804 /* Analyze whether and how the call statement CALL utilizes formal parameters
1805 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1806 containing intermediate information about each formal parameter. */
1807
1808 static void
1809 ipa_analyze_call_uses (struct cgraph_node *node,
1810 struct ipa_node_params *info,
1811 struct param_analysis_info *parms_ainfo, gimple call)
1812 {
1813 tree target = gimple_call_fn (call);
1814
1815 if (!target)
1816 return;
1817 if (TREE_CODE (target) == SSA_NAME)
1818 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1819 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1820 ipa_analyze_virtual_call_uses (node, info, call, target);
1821 }
1822
1823
1824 /* Analyze the call statement STMT with respect to formal parameters (described
1825 in INFO) of the caller given by NODE. Currently it only checks whether formal
1826 parameters are called. PARMS_AINFO is a pointer to a vector containing
1827 intermediate information about each formal parameter. */
1828
1829 static void
1830 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1831 struct param_analysis_info *parms_ainfo, gimple stmt)
1832 {
1833 if (is_gimple_call (stmt))
1834 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1835 }
1836
1837 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1838 If OP is a parameter declaration, mark it as used in the info structure
1839 passed in DATA. */
1840
1841 static bool
1842 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1843 tree op, void *data)
1844 {
1845 struct ipa_node_params *info = (struct ipa_node_params *) data;
1846
1847 op = get_base_address (op);
1848 if (op
1849 && TREE_CODE (op) == PARM_DECL)
1850 {
1851 int index = ipa_get_param_decl_index (info, op);
1852 gcc_assert (index >= 0);
1853 ipa_set_param_used (info, index, true);
1854 }
1855
1856 return false;
1857 }
1858
1859 /* Scan the function body of NODE and inspect the uses of formal parameters.
1860 Store the findings in various structures of the associated ipa_node_params
1861 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1862 vector containing intermediate information about each formal parameter. */
1863
1864 static void
1865 ipa_analyze_params_uses (struct cgraph_node *node,
1866 struct param_analysis_info *parms_ainfo)
1867 {
1868 tree decl = node->symbol.decl;
1869 basic_block bb;
1870 struct function *func;
1871 gimple_stmt_iterator gsi;
1872 struct ipa_node_params *info = IPA_NODE_REF (node);
1873 int i;
1874
1875 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1876 return;
1877
1878 for (i = 0; i < ipa_get_param_count (info); i++)
1879 {
1880 tree parm = ipa_get_param (info, i);
1881 tree ddef;
1882 /* For SSA regs see if parameter is used. For non-SSA we compute
1883 the flag during modification analysis. */
1884 if (is_gimple_reg (parm)
1885 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1886 parm)) != NULL_TREE
1887 && !has_zero_uses (ddef))
1888 ipa_set_param_used (info, i, true);
1889 }
1890
1891 func = DECL_STRUCT_FUNCTION (decl);
1892 FOR_EACH_BB_FN (bb, func)
1893 {
1894 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1895 {
1896 gimple stmt = gsi_stmt (gsi);
1897
1898 if (is_gimple_debug (stmt))
1899 continue;
1900
1901 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1902 walk_stmt_load_store_addr_ops (stmt, info,
1903 visit_ref_for_mod_analysis,
1904 visit_ref_for_mod_analysis,
1905 visit_ref_for_mod_analysis);
1906 }
1907 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1908 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1909 visit_ref_for_mod_analysis,
1910 visit_ref_for_mod_analysis,
1911 visit_ref_for_mod_analysis);
1912 }
1913
1914 info->uses_analysis_done = 1;
1915 }
1916
1917 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
1918
1919 static void
1920 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
1921 {
1922 int i;
1923
1924 for (i = 0; i < param_count; i++)
1925 {
1926 if (parms_ainfo[i].parm_visited_statements)
1927 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1928 if (parms_ainfo[i].pt_visited_statements)
1929 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1930 }
1931 }
1932
1933 /* Initialize the array describing properties of formal parameters
1934 of NODE, analyze their uses and compute jump functions associated
1935 with actual arguments of calls from within NODE. */
1936
1937 void
1938 ipa_analyze_node (struct cgraph_node *node)
1939 {
1940 struct ipa_node_params *info;
1941 struct param_analysis_info *parms_ainfo;
1942 int param_count;
1943
1944 ipa_check_create_node_params ();
1945 ipa_check_create_edge_args ();
1946 info = IPA_NODE_REF (node);
1947 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1948 ipa_initialize_node_params (node);
1949
1950 param_count = ipa_get_param_count (info);
1951 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1952 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1953
1954 ipa_analyze_params_uses (node, parms_ainfo);
1955 ipa_compute_jump_functions (node, parms_ainfo);
1956
1957 free_parms_ainfo (parms_ainfo, param_count);
1958 pop_cfun ();
1959 }
1960
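/* A minimal usage sketch (assuming the usual IPA pass environment): a
   summary-building pass hands every function with a gimple body to this
   analyzer, which is roughly what the IPA-CP generate_summary hook does:

	struct cgraph_node *node;

	ipa_register_cgraph_hooks ();
	FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
	  ipa_analyze_node (node);  */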
1961
1962 /* Update the jump function DST when the call graph edge corresponding to
1963 SRC is being inlined, knowing that DST is of type ancestor and SRC of
1964 known type. */
1965
1966 static void
1967 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1968 struct ipa_jump_func *dst)
1969 {
1970 HOST_WIDE_INT combined_offset;
1971 tree combined_type;
1972
1973 combined_offset = ipa_get_jf_known_type_offset (src)
1974 + ipa_get_jf_ancestor_offset (dst);
1975 combined_type = ipa_get_jf_ancestor_type (dst);
1976
1977 ipa_set_jf_known_type (dst, combined_offset,
1978 ipa_get_jf_known_type_base_type (src),
1979 combined_type);
1980 }
1981
1982 /* Update the jump functions associated with call graph edge E when the call
1983 graph edge CS is being inlined, assuming that E->caller is already (possibly
1984 indirectly) inlined into CS->callee and that E has not been inlined. */
1985
1986 static void
1987 update_jump_functions_after_inlining (struct cgraph_edge *cs,
1988 struct cgraph_edge *e)
1989 {
1990 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
1991 struct ipa_edge_args *args = IPA_EDGE_REF (e);
1992 int count = ipa_get_cs_argument_count (args);
1993 int i;
1994
1995 for (i = 0; i < count; i++)
1996 {
1997 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
1998
1999 if (dst->type == IPA_JF_ANCESTOR)
2000 {
2001 struct ipa_jump_func *src;
2002 int dst_fid = dst->value.ancestor.formal_id;
2003
2004 /* A variable number of arguments can cause havoc if we try to access
2005 one that does not exist in the inlined edge. So make sure we
2006 don't. */
2007 if (dst_fid >= ipa_get_cs_argument_count (top))
2008 {
2009 dst->type = IPA_JF_UNKNOWN;
2010 continue;
2011 }
2012
2013 src = ipa_get_ith_jump_func (top, dst_fid);
2014
2015 if (src->agg.items
2016 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2017 {
2018 struct ipa_agg_jf_item *item;
2019 int j;
2020
2021 /* Currently we do not produce clobber aggregate jump functions;
2022 replace with merging when we do. */
2023 gcc_assert (!dst->agg.items);
2024
2025 dst->agg.items = vec_safe_copy (src->agg.items);
2026 dst->agg.by_ref = src->agg.by_ref;
2027 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2028 item->offset -= dst->value.ancestor.offset;
2029 }
2030
2031 if (src->type == IPA_JF_KNOWN_TYPE)
2032 combine_known_type_and_ancestor_jfs (src, dst);
2033 else if (src->type == IPA_JF_PASS_THROUGH
2034 && src->value.pass_through.operation == NOP_EXPR)
2035 {
2036 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2037 dst->value.ancestor.agg_preserved &=
2038 src->value.pass_through.agg_preserved;
2039 }
2040 else if (src->type == IPA_JF_ANCESTOR)
2041 {
2042 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2043 dst->value.ancestor.offset += src->value.ancestor.offset;
2044 dst->value.ancestor.agg_preserved &=
2045 src->value.ancestor.agg_preserved;
2046 }
2047 else
2048 dst->type = IPA_JF_UNKNOWN;
2049 }
2050 else if (dst->type == IPA_JF_PASS_THROUGH)
2051 {
2052 struct ipa_jump_func *src;
2053 /* We must check range due to calls with a variable number of arguments
2054 and we cannot combine jump functions with operations. */
2055 if (dst->value.pass_through.operation == NOP_EXPR
2056 && (dst->value.pass_through.formal_id
2057 < ipa_get_cs_argument_count (top)))
2058 {
2059 bool agg_p;
2060 int dst_fid = dst->value.pass_through.formal_id;
2061 src = ipa_get_ith_jump_func (top, dst_fid);
2062 agg_p = dst->value.pass_through.agg_preserved;
2063
2064 dst->type = src->type;
2065 dst->value = src->value;
2066
2067 if (src->agg.items
2068 && (agg_p || !src->agg.by_ref))
2069 {
2070 /* Currently we do not produce clobber aggregate jump
2071 functions; replace with merging when we do. */
2072 gcc_assert (!dst->agg.items);
2073
2074 dst->agg.by_ref = src->agg.by_ref;
2075 dst->agg.items = vec_safe_copy (src->agg.items);
2076 }
2077
2078 if (!agg_p)
2079 {
2080 if (dst->type == IPA_JF_PASS_THROUGH)
2081 dst->value.pass_through.agg_preserved = false;
2082 else if (dst->type == IPA_JF_ANCESTOR)
2083 dst->value.ancestor.agg_preserved = false;
2084 }
2085 }
2086 else
2087 dst->type = IPA_JF_UNKNOWN;
2088 }
2089 }
2090 }
2091
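/* A worked example (hypothetical types and calls):

	struct A { int i; };
	struct B { int pad; struct A a; };
	struct C { int pad2; struct B b; };

	void f3 (struct A *p);
	void f2 (struct B *q) { f3 (&q->a); }	// ancestor jf of Q
	void f1 (struct C *r) { f2 (&r->b); }	// ancestor jf of R

   Before inlining, the f2->f3 summary describes f3's argument as an ancestor
   of f2's parameter Q.  When the f1->f2 edge is inlined, the IPA_JF_ANCESTOR
   case above takes the formal_id from f1's jump function and sums the two
   offsets, so the surviving f2->f3 edge describes f3's argument directly as
   &r->b.a in terms of f1's parameter R.  */
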
2092 /* If TARGET is an addr_expr of a function declaration, make it the destination
2093 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2094
2095 struct cgraph_edge *
2096 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2097 {
2098 struct cgraph_node *callee;
2099 struct inline_edge_summary *es = inline_edge_summary (ie);
2100
2101 if (TREE_CODE (target) == ADDR_EXPR)
2102 target = TREE_OPERAND (target, 0);
2103 if (TREE_CODE (target) != FUNCTION_DECL)
2104 return NULL;
2105 callee = cgraph_get_node (target);
2106 if (!callee)
2107 return NULL;
2108 ipa_check_create_node_params ();
2109
2110 /* We cannot make edges to inline clones. It is a bug that someone removed
2111 the cgraph node too early. */
2112 gcc_assert (!callee->global.inlined_to);
2113
2114 cgraph_make_edge_direct (ie, callee);
2115 es = inline_edge_summary (ie);
2116 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2117 - eni_size_weights.call_cost);
2118 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2119 - eni_time_weights.call_cost);
2120 if (dump_file)
2121 {
2122 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2123 "(%s/%i -> %s/%i), for stmt ",
2124 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2125 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2126 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2127 if (ie->call_stmt)
2128 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2129 else
2130 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2131 }
2132 callee = cgraph_function_or_thunk_node (callee, NULL);
2133
2134 return ie;
2135 }
2136
2137 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2138 return NULL if there is none. BY_REF specifies whether the value has to
2139 be passed by reference or by value. */
2140
2141 tree
2142 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2143 HOST_WIDE_INT offset, bool by_ref)
2144 {
2145 struct ipa_agg_jf_item *item;
2146 int i;
2147
2148 if (by_ref != agg->by_ref)
2149 return NULL;
2150
2151 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2152 if (item->offset == offset)
2153 {
2154 /* Currently we do not have clobber values; return NULL for them once
2155 we do. */
2156 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2157 return item->value;
2158 }
2159 return NULL;
2160 }
2161
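/* A lookup sketch (hypothetical caller; note that the offsets of aggregate
   jump function items are expressed in bits): if a caller passes the address
   of a local aggregate whose relevant field holds a known constant,

	struct S { void (*cb) (void); } s;
	s.cb = some_fn;
	dispatch (&s);

   then the jump function built for dispatch's argument carries an agg item
   with by_ref set, and

	t = ipa_find_agg_cst_for_param (&jfunc->agg,
					offsetof (struct S, cb) * BITS_PER_UNIT,
					true);

   yields the ADDR_EXPR of some_fn, from which the indirect call through s.cb
   can be made direct.  */
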
2162 /* Try to find a destination for indirect edge IE that corresponds to a simple
2163 call or a call of a member function pointer and where the destination is a
2164 pointer formal parameter described by jump function JFUNC. If it can be
2165 determined, return the newly direct edge, otherwise return NULL.
2166 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2167
2168 static struct cgraph_edge *
2169 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2170 struct ipa_jump_func *jfunc,
2171 struct ipa_node_params *new_root_info)
2172 {
2173 tree target;
2174
2175 if (ie->indirect_info->agg_contents)
2176 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2177 ie->indirect_info->offset,
2178 ie->indirect_info->by_ref);
2179 else
2180 target = ipa_value_from_jfunc (new_root_info, jfunc);
2181 if (!target)
2182 return NULL;
2183 return ipa_make_edge_direct_to_target (ie, target);
2184 }
2185
2186 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2187 call based on a formal parameter which is described by jump function JFUNC
2188 and if it can be determined, make it direct and return the direct edge.
2189 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2190 are relative to. */
2191
2192 static struct cgraph_edge *
2193 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2194 struct ipa_jump_func *jfunc,
2195 struct ipa_node_params *new_root_info)
2196 {
2197 tree binfo, target;
2198
2199 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2200
2201 if (!binfo)
2202 return NULL;
2203
2204 if (TREE_CODE (binfo) != TREE_BINFO)
2205 {
2206 binfo = gimple_extract_devirt_binfo_from_cst (binfo);
2207 if (!binfo)
2208 return NULL;
2209 }
2210
2211 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2212 ie->indirect_info->otr_type);
2213 if (binfo)
2214 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2215 binfo);
2216 else
2217 return NULL;
2218
2219 if (target)
2220 return ipa_make_edge_direct_to_target (ie, target);
2221 else
2222 return NULL;
2223 }
2224
2225 /* Update the param called notes associated with NODE when CS is being inlined,
2226 assuming NODE is (potentially indirectly) inlined into CS->callee.
2227 Moreover, if the callee is discovered to be constant, create a new cgraph
2228 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2229 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2230
2231 static bool
2232 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2233 struct cgraph_node *node,
2234 vec<cgraph_edge_p> *new_edges)
2235 {
2236 struct ipa_edge_args *top;
2237 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2238 struct ipa_node_params *new_root_info;
2239 bool res = false;
2240
2241 ipa_check_create_edge_args ();
2242 top = IPA_EDGE_REF (cs);
2243 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2244 ? cs->caller->global.inlined_to
2245 : cs->caller);
2246
2247 for (ie = node->indirect_calls; ie; ie = next_ie)
2248 {
2249 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2250 struct ipa_jump_func *jfunc;
2251 int param_index;
2252
2253 next_ie = ie->next_callee;
2254
2255 if (ici->param_index == -1)
2256 continue;
2257
2258 /* We must check range due to calls with a variable number of arguments: */
2259 if (ici->param_index >= ipa_get_cs_argument_count (top))
2260 {
2261 ici->param_index = -1;
2262 continue;
2263 }
2264
2265 param_index = ici->param_index;
2266 jfunc = ipa_get_ith_jump_func (top, param_index);
2267 if (jfunc->type == IPA_JF_PASS_THROUGH
2268 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2269 {
2270 if (ici->agg_contents
2271 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2272 ici->param_index = -1;
2273 else
2274 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2275 }
2276 else if (jfunc->type == IPA_JF_ANCESTOR)
2277 {
2278 if (ici->agg_contents
2279 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2280 ici->param_index = -1;
2281 else
2282 {
2283 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2284 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2285 }
2286 }
2287 else
2288 /* Either we can find a destination for this edge now or never. */
2289 ici->param_index = -1;
2290
2291 if (!flag_indirect_inlining)
2292 continue;
2293
2294 if (ici->polymorphic)
2295 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2296 new_root_info);
2297 else
2298 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2299 new_root_info);
2300
2301 if (new_direct_edge)
2302 {
2303 new_direct_edge->indirect_inlining_edge = 1;
2304 if (new_direct_edge->call_stmt)
2305 new_direct_edge->call_stmt_cannot_inline_p
2306 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2307 new_direct_edge->callee->symbol.decl);
2308 if (new_edges)
2309 {
2310 new_edges->safe_push (new_direct_edge);
2311 top = IPA_EDGE_REF (cs);
2312 res = true;
2313 }
2314 }
2315 }
2316
2317 return res;
2318 }
2319
2320 /* Recursively traverse subtree of NODE (including node) made of inlined
2321 cgraph_edges when CS has been inlined and invoke
2322 update_indirect_edges_after_inlining on all nodes and
2323 update_jump_functions_after_inlining on all non-inlined edges that lead out
2324 of this subtree. Newly discovered indirect edges will be added to
2325 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2326 created. */
2327
2328 static bool
2329 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2330 struct cgraph_node *node,
2331 vec<cgraph_edge_p> *new_edges)
2332 {
2333 struct cgraph_edge *e;
2334 bool res;
2335
2336 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2337
2338 for (e = node->callees; e; e = e->next_callee)
2339 if (!e->inline_failed)
2340 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2341 else
2342 update_jump_functions_after_inlining (cs, e);
2343 for (e = node->indirect_calls; e; e = e->next_callee)
2344 update_jump_functions_after_inlining (cs, e);
2345
2346 return res;
2347 }
2348
2349 /* Update jump functions and call note functions on inlining the call site CS.
2350 CS is expected to lead to a node already cloned by
2351 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2352 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2353 created. */
2354
2355 bool
2356 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2357 vec<cgraph_edge_p> *new_edges)
2358 {
2359 bool changed;
2360 /* Do nothing if the preparation phase has not been carried out yet
2361 (i.e. during early inlining). */
2362 if (!ipa_node_params_vector.exists ())
2363 return false;
2364 gcc_assert (ipa_edge_args_vector);
2365
2366 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2367
2368 /* We do not keep jump functions of inlined edges up to date. Better to free
2369 them so we do not access them accidentally. */
2370 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2371 return changed;
2372 }
2373
2374 /* Frees all dynamically allocated structures that the argument info points
2375 to. */
2376
2377 void
2378 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2379 {
2380 vec_free (args->jump_functions);
2381 memset (args, 0, sizeof (*args));
2382 }
2383
2384 /* Free all ipa_edge structures. */
2385
2386 void
2387 ipa_free_all_edge_args (void)
2388 {
2389 int i;
2390 struct ipa_edge_args *args;
2391
2392 if (!ipa_edge_args_vector)
2393 return;
2394
2395 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2396 ipa_free_edge_args_substructures (args);
2397
2398 vec_free (ipa_edge_args_vector);
2399 }
2400
2401 /* Frees all dynamically allocated structures that the param info points
2402 to. */
2403
2404 void
2405 ipa_free_node_params_substructures (struct ipa_node_params *info)
2406 {
2407 info->descriptors.release ();
2408 free (info->lattices);
2409 /* Lattice values and their sources are deallocated with their allocation
2410 pool. */
2411 info->known_vals.release ();
2412 memset (info, 0, sizeof (*info));
2413 }
2414
2415 /* Free all ipa_node_params structures. */
2416
2417 void
2418 ipa_free_all_node_params (void)
2419 {
2420 int i;
2421 struct ipa_node_params *info;
2422
2423 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2424 ipa_free_node_params_substructures (info);
2425
2426 ipa_node_params_vector.release ();
2427 }
2428
2429 /* Set the aggregate replacements of NODE to be AGGVALS. */
2430
2431 void
2432 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2433 struct ipa_agg_replacement_value *aggvals)
2434 {
2435 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2436 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2437
2438 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2439 }
2440
2441 /* Hook that is called by cgraph.c when an edge is removed. */
2442
2443 static void
2444 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2445 {
2446 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2447 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2448 return;
2449 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2450 }
2451
2452 /* Hook that is called by cgraph.c when a node is removed. */
2453
2454 static void
2455 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2456 {
2457 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2458 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2459 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2460 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2461 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2462 }
2463
2464 /* Hook that is called by cgraph.c when an edge is duplicated. */
2465
2466 static void
2467 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2468 __attribute__((unused)) void *data)
2469 {
2470 struct ipa_edge_args *old_args, *new_args;
2471 unsigned int i;
2472
2473 ipa_check_create_edge_args ();
2474
2475 old_args = IPA_EDGE_REF (src);
2476 new_args = IPA_EDGE_REF (dst);
2477
2478 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2479
2480 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2481 (*new_args->jump_functions)[i].agg.items
2482 = vec_safe_copy ((*old_args->jump_functions)[i].agg.items);
2483 }
2484
2485 /* Hook that is called by cgraph.c when a node is duplicated. */
2486
2487 static void
2488 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2489 ATTRIBUTE_UNUSED void *data)
2490 {
2491 struct ipa_node_params *old_info, *new_info;
2492 struct ipa_agg_replacement_value *old_av, *new_av;
2493
2494 ipa_check_create_node_params ();
2495 old_info = IPA_NODE_REF (src);
2496 new_info = IPA_NODE_REF (dst);
2497
2498 new_info->descriptors = old_info->descriptors.copy ();
2499 new_info->lattices = NULL;
2500 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2501
2502 new_info->uses_analysis_done = old_info->uses_analysis_done;
2503 new_info->node_enqueued = old_info->node_enqueued;
2504
2505 old_av = ipa_get_agg_replacements_for_node (src);
2506 if (!old_av)
2507 return;
2508
2509 new_av = NULL;
2510 while (old_av)
2511 {
2512 struct ipa_agg_replacement_value *v;
2513
2514 v = ggc_alloc_ipa_agg_replacement_value ();
2515 memcpy (v, old_av, sizeof (*v));
2516 v->next = new_av;
2517 new_av = v;
2518 old_av = old_av->next;
2519 }
2520 ipa_set_node_agg_value_chain (dst, new_av);
2521 }
2522
2523
2524 /* Analyze newly added function into callgraph. */
2525
2526 static void
2527 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2528 {
2529 ipa_analyze_node (node);
2530 }
2531
2532 /* Register our cgraph hooks if they are not already there. */
2533
2534 void
2535 ipa_register_cgraph_hooks (void)
2536 {
2537 if (!edge_removal_hook_holder)
2538 edge_removal_hook_holder =
2539 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2540 if (!node_removal_hook_holder)
2541 node_removal_hook_holder =
2542 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2543 if (!edge_duplication_hook_holder)
2544 edge_duplication_hook_holder =
2545 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2546 if (!node_duplication_hook_holder)
2547 node_duplication_hook_holder =
2548 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2549 function_insertion_hook_holder =
2550 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2551 }
2552
2553 /* Unregister our cgraph hooks if they are not already there. */
2554
2555 static void
2556 ipa_unregister_cgraph_hooks (void)
2557 {
2558 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2559 edge_removal_hook_holder = NULL;
2560 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2561 node_removal_hook_holder = NULL;
2562 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2563 edge_duplication_hook_holder = NULL;
2564 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2565 node_duplication_hook_holder = NULL;
2566 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2567 function_insertion_hook_holder = NULL;
2568 }
2569
2570 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2571 longer needed after ipa-cp. */
2572
2573 void
2574 ipa_free_all_structures_after_ipa_cp (void)
2575 {
2576 if (!optimize)
2577 {
2578 ipa_free_all_edge_args ();
2579 ipa_free_all_node_params ();
2580 free_alloc_pool (ipcp_sources_pool);
2581 free_alloc_pool (ipcp_values_pool);
2582 free_alloc_pool (ipcp_agg_lattice_pool);
2583 ipa_unregister_cgraph_hooks ();
2584 }
2585 }
2586
2587 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2588 longer needed after indirect inlining. */
2589
2590 void
2591 ipa_free_all_structures_after_iinln (void)
2592 {
2593 ipa_free_all_edge_args ();
2594 ipa_free_all_node_params ();
2595 ipa_unregister_cgraph_hooks ();
2596 if (ipcp_sources_pool)
2597 free_alloc_pool (ipcp_sources_pool);
2598 if (ipcp_values_pool)
2599 free_alloc_pool (ipcp_values_pool);
2600 if (ipcp_agg_lattice_pool)
2601 free_alloc_pool (ipcp_agg_lattice_pool);
2602 }
2603
2604 /* Print the ipa_tree_map data structure (parameter descriptors) of function
2605 NODE to F. */
2606
2607 void
2608 ipa_print_node_params (FILE *f, struct cgraph_node *node)
2609 {
2610 int i, count;
2611 tree temp;
2612 struct ipa_node_params *info;
2613
2614 if (!node->analyzed)
2615 return;
2616 info = IPA_NODE_REF (node);
2617 fprintf (f, " function %s parameter descriptors:\n",
2618 cgraph_node_name (node));
2619 count = ipa_get_param_count (info);
2620 for (i = 0; i < count; i++)
2621 {
2622 temp = ipa_get_param (info, i);
2623 if (TREE_CODE (temp) == PARM_DECL)
2624 fprintf (f, " param %d : %s", i,
2625 (DECL_NAME (temp)
2626 ? (*lang_hooks.decl_printable_name) (temp, 2)
2627 : "(unnamed)"));
2628 if (ipa_is_param_used (info, i))
2629 fprintf (f, " used");
2630 fprintf (f, "\n");
2631 }
2632 }
2633
2634 /* Print ipa_tree_map data structures of all functions in the
2635 callgraph to F. */
2636
2637 void
2638 ipa_print_all_params (FILE * f)
2639 {
2640 struct cgraph_node *node;
2641
2642 fprintf (f, "\nFunction parameters:\n");
2643 FOR_EACH_FUNCTION (node)
2644 ipa_print_node_params (f, node);
2645 }
2646
2647 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
2648
2649 vec<tree>
2650 ipa_get_vector_of_formal_parms (tree fndecl)
2651 {
2652 vec<tree> args;
2653 int count;
2654 tree parm;
2655
2656 count = count_formal_params (fndecl);
2657 args.create (count);
2658 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2659 args.quick_push (parm);
2660
2661 return args;
2662 }
2663
2664 /* Return a heap-allocated vector containing types of formal parameters of
2665 function type FNTYPE. */
2666
2667 static inline vec<tree>
2668 get_vector_of_formal_parm_types (tree fntype)
2669 {
2670 vec<tree> types;
2671 int count = 0;
2672 tree t;
2673
2674 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2675 count++;
2676
2677 types.create (count);
2678 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2679 types.quick_push (TREE_VALUE (t));
2680
2681 return types;
2682 }
2683
2684 /* Modify the function declaration FNDECL and its type according to the plan in
2685 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2686 to reflect the actual parameters being modified which are determined by the
2687 base_index field. */
2688
2689 void
2690 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2691 const char *synth_parm_prefix)
2692 {
2693 vec<tree> oparms, otypes;
2694 tree orig_type, new_type = NULL;
2695 tree old_arg_types, t, new_arg_types = NULL;
2696 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2697 int i, len = adjustments.length ();
2698 tree new_reversed = NULL;
2699 bool care_for_types, last_parm_void;
2700
2701 if (!synth_parm_prefix)
2702 synth_parm_prefix = "SYNTH";
2703
2704 oparms = ipa_get_vector_of_formal_parms (fndecl);
2705 orig_type = TREE_TYPE (fndecl);
2706 old_arg_types = TYPE_ARG_TYPES (orig_type);
2707
2708 /* The following test is an ugly hack; some functions simply don't have any
2709 arguments in their type. This is probably a bug but well... */
2710 care_for_types = (old_arg_types != NULL_TREE);
2711 if (care_for_types)
2712 {
2713 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2714 == void_type_node);
2715 otypes = get_vector_of_formal_parm_types (orig_type);
2716 if (last_parm_void)
2717 gcc_assert (oparms.length () + 1 == otypes.length ());
2718 else
2719 gcc_assert (oparms.length () == otypes.length ());
2720 }
2721 else
2722 {
2723 last_parm_void = false;
2724 otypes.create (0);
2725 }
2726
2727 for (i = 0; i < len; i++)
2728 {
2729 struct ipa_parm_adjustment *adj;
2730 gcc_assert (link);
2731
2732 adj = &adjustments[i];
2733 parm = oparms[adj->base_index];
2734 adj->base = parm;
2735
2736 if (adj->copy_param)
2737 {
2738 if (care_for_types)
2739 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
2740 new_arg_types);
2741 *link = parm;
2742 link = &DECL_CHAIN (parm);
2743 }
2744 else if (!adj->remove_param)
2745 {
2746 tree new_parm;
2747 tree ptype;
2748
2749 if (adj->by_ref)
2750 ptype = build_pointer_type (adj->type);
2751 else
2752 ptype = adj->type;
2753
2754 if (care_for_types)
2755 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2756
2757 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2758 ptype);
2759 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2760
2761 DECL_ARTIFICIAL (new_parm) = 1;
2762 DECL_ARG_TYPE (new_parm) = ptype;
2763 DECL_CONTEXT (new_parm) = fndecl;
2764 TREE_USED (new_parm) = 1;
2765 DECL_IGNORED_P (new_parm) = 1;
2766 layout_decl (new_parm, 0);
2767
2768 adj->base = parm;
2769 adj->reduction = new_parm;
2770
2771 *link = new_parm;
2772
2773 link = &DECL_CHAIN (new_parm);
2774 }
2775 }
2776
2777 *link = NULL_TREE;
2778
2779 if (care_for_types)
2780 {
2781 new_reversed = nreverse (new_arg_types);
2782 if (last_parm_void)
2783 {
2784 if (new_reversed)
2785 TREE_CHAIN (new_arg_types) = void_list_node;
2786 else
2787 new_reversed = void_list_node;
2788 }
2789 }
2790
2791 /* Use copy_node to preserve as much as possible from original type
2792 (debug info, attribute lists etc.)
2793 The exception is that METHOD_TYPEs must have a THIS argument; when we
2794 are asked to remove it, we need to build a new FUNCTION_TYPE
2795 instead. */
2796 if (TREE_CODE (orig_type) != METHOD_TYPE
2797 || (adjustments[0].copy_param
2798 && adjustments[0].base_index == 0))
2799 {
2800 new_type = build_distinct_type_copy (orig_type);
2801 TYPE_ARG_TYPES (new_type) = new_reversed;
2802 }
2803 else
2804 {
2805 new_type
2806 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2807 new_reversed));
2808 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2809 DECL_VINDEX (fndecl) = NULL_TREE;
2810 }
2811
2812 /* When signature changes, we need to clear builtin info. */
2813 if (DECL_BUILT_IN (fndecl))
2814 {
2815 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2816 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2817 }
2818
2819 /* This is a new type, not a copy of an old type. Need to reassociate
2820 variants. We can handle everything except the main variant lazily. */
2821 t = TYPE_MAIN_VARIANT (orig_type);
2822 if (orig_type != t)
2823 {
2824 TYPE_MAIN_VARIANT (new_type) = t;
2825 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2826 TYPE_NEXT_VARIANT (t) = new_type;
2827 }
2828 else
2829 {
2830 TYPE_MAIN_VARIANT (new_type) = new_type;
2831 TYPE_NEXT_VARIANT (new_type) = NULL;
2832 }
2833
2834 TREE_TYPE (fndecl) = new_type;
2835 DECL_VIRTUAL_P (fndecl) = 0;
2836 otypes.release ();
2837 oparms.release ();
2838 }
2839
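/* An illustrative sketch (a hypothetical adjustment vector of the kind
   IPA-SRA builds): to rewrite

	int f (int a, struct big *p);

   so that only the single used field of *P is passed, by value,

	int f (int a, int ISRA_3);	// name derived from SYNTH_PARM_PREFIX

   the vector would hold one entry with base_index 0 and copy_param set and
   one with base_index 1 where copy_param, remove_param and by_ref are all
   clear, type is the field's type and offset is the field's bit offset
   within *P.  */
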
2840 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2841 If this is a directly recursive call, CS must be NULL. Otherwise it must
2842 contain the corresponding call graph edge. */
2843
2844 void
2845 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2846 ipa_parm_adjustment_vec adjustments)
2847 {
2848 vec<tree> vargs;
2849 vec<tree, va_gc> **debug_args = NULL;
2850 gimple new_stmt;
2851 gimple_stmt_iterator gsi;
2852 tree callee_decl;
2853 int i, len;
2854
2855 len = adjustments.length ();
2856 vargs.create (len);
2857 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2858
2859 gsi = gsi_for_stmt (stmt);
2860 for (i = 0; i < len; i++)
2861 {
2862 struct ipa_parm_adjustment *adj;
2863
2864 adj = &adjustments[i];
2865
2866 if (adj->copy_param)
2867 {
2868 tree arg = gimple_call_arg (stmt, adj->base_index);
2869
2870 vargs.quick_push (arg);
2871 }
2872 else if (!adj->remove_param)
2873 {
2874 tree expr, base, off;
2875 location_t loc;
2876 unsigned int deref_align;
2877 bool deref_base = false;
2878
2879 /* We create a new parameter out of the value of the old one; we can
2880 do the following kinds of transformations:
2881
2882 - A scalar passed by reference is converted to a scalar passed by
2883 value. (adj->by_ref is false and the type of the original
2884 actual argument is a pointer to a scalar).
2885
2886 - A part of an aggregate is passed instead of the whole aggregate.
2887 The part can be passed either by value or by reference, this is
2888 determined by the value of adj->by_ref. Moreover, the code below
2889 handles both situations when the original aggregate is passed by
2890 value (its type is not a pointer) and when it is passed by
2891 reference (it is a pointer to an aggregate).
2892
2893 When the new argument is passed by reference (adj->by_ref is true)
2894 it must be a part of an aggregate and therefore we form it by
2895 simply taking the address of a reference inside the original
2896 aggregate. */
2897
2898 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2899 base = gimple_call_arg (stmt, adj->base_index);
2900 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
2901 : EXPR_LOCATION (base);
2902
2903 if (TREE_CODE (base) != ADDR_EXPR
2904 && POINTER_TYPE_P (TREE_TYPE (base)))
2905 off = build_int_cst (adj->alias_ptr_type,
2906 adj->offset / BITS_PER_UNIT);
2907 else
2908 {
2909 HOST_WIDE_INT base_offset;
2910 tree prev_base;
2911 bool addrof;
2912
2913 if (TREE_CODE (base) == ADDR_EXPR)
2914 {
2915 base = TREE_OPERAND (base, 0);
2916 addrof = true;
2917 }
2918 else
2919 addrof = false;
2920 prev_base = base;
2921 base = get_addr_base_and_unit_offset (base, &base_offset);
2922 /* Aggregate arguments can have non-invariant addresses. */
2923 if (!base)
2924 {
2925 base = build_fold_addr_expr (prev_base);
2926 off = build_int_cst (adj->alias_ptr_type,
2927 adj->offset / BITS_PER_UNIT);
2928 }
2929 else if (TREE_CODE (base) == MEM_REF)
2930 {
2931 if (!addrof)
2932 {
2933 deref_base = true;
2934 deref_align = TYPE_ALIGN (TREE_TYPE (base));
2935 }
2936 off = build_int_cst (adj->alias_ptr_type,
2937 base_offset
2938 + adj->offset / BITS_PER_UNIT);
2939 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2940 off);
2941 base = TREE_OPERAND (base, 0);
2942 }
2943 else
2944 {
2945 off = build_int_cst (adj->alias_ptr_type,
2946 base_offset
2947 + adj->offset / BITS_PER_UNIT);
2948 base = build_fold_addr_expr (base);
2949 }
2950 }
2951
2952 if (!adj->by_ref)
2953 {
2954 tree type = adj->type;
2955 unsigned int align;
2956 unsigned HOST_WIDE_INT misalign;
2957
2958 if (deref_base)
2959 {
2960 align = deref_align;
2961 misalign = 0;
2962 }
2963 else
2964 {
2965 get_pointer_alignment_1 (base, &align, &misalign);
2966 if (TYPE_ALIGN (type) > align)
2967 align = TYPE_ALIGN (type);
2968 }
2969 misalign += (tree_to_double_int (off)
2970 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2971 * BITS_PER_UNIT);
2972 misalign = misalign & (align - 1);
2973 if (misalign != 0)
2974 align = (misalign & -misalign);
2975 if (align < TYPE_ALIGN (type))
2976 type = build_aligned_type (type, align);
2977 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2978 }
2979 else
2980 {
2981 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2982 expr = build_fold_addr_expr (expr);
2983 }
2984
2985 expr = force_gimple_operand_gsi (&gsi, expr,
2986 adj->by_ref
2987 || is_gimple_reg_type (adj->type),
2988 NULL, true, GSI_SAME_STMT);
2989 vargs.quick_push (expr);
2990 }
2991 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2992 {
2993 unsigned int ix;
2994 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2995 gimple def_temp;
2996
2997 arg = gimple_call_arg (stmt, adj->base_index);
2998 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2999 {
3000 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3001 continue;
3002 arg = fold_convert_loc (gimple_location (stmt),
3003 TREE_TYPE (origin), arg);
3004 }
3005 if (debug_args == NULL)
3006 debug_args = decl_debug_args_insert (callee_decl);
3007 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
3008 if (ddecl == origin)
3009 {
3010 ddecl = (**debug_args)[ix + 1];
3011 break;
3012 }
3013 if (ddecl == NULL)
3014 {
3015 ddecl = make_node (DEBUG_EXPR_DECL);
3016 DECL_ARTIFICIAL (ddecl) = 1;
3017 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3018 DECL_MODE (ddecl) = DECL_MODE (origin);
3019
3020 vec_safe_push (*debug_args, origin);
3021 vec_safe_push (*debug_args, ddecl);
3022 }
3023 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3024 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3025 }
3026 }
3027
3028 if (dump_file && (dump_flags & TDF_DETAILS))
3029 {
3030 fprintf (dump_file, "replacing stmt:");
3031 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3032 }
3033
3034 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3035 vargs.release ();
3036 if (gimple_call_lhs (stmt))
3037 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3038
3039 gimple_set_block (new_stmt, gimple_block (stmt));
3040 if (gimple_has_location (stmt))
3041 gimple_set_location (new_stmt, gimple_location (stmt));
3042 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3043 gimple_call_copy_flags (new_stmt, stmt);
3044
3045 if (dump_file && (dump_flags & TDF_DETAILS))
3046 {
3047 fprintf (dump_file, "with stmt:");
3048 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3049 fprintf (dump_file, "\n");
3050 }
3051 gsi_replace (&gsi, new_stmt, true);
3052 if (cs)
3053 cgraph_set_call_stmt (cs, new_stmt);
3054 update_ssa (TODO_update_ssa);
3055 free_dominance_info (CDI_DOMINATORS);
3056 }
3057
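/* Continuing the sketch after ipa_modify_formal_parameters (hypothetical):
   given the adjustment vector described there, an original call

	f (i, &s);

   is rewritten by the code above into roughly

	tmp_4 = MEM[(int *)&s + 8B];	// the field at bit offset 64
	f (i, tmp_4);

   where the MEM_REF uses the recorded alias pointer type and an alignment
   derived from the base, and a debug bind for the removed parameter is
   inserted when debug info is being generated.  */
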
3058 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3059
3060 static bool
3061 index_in_adjustments_multiple_times_p (int base_index,
3062 ipa_parm_adjustment_vec adjustments)
3063 {
3064 int i, len = adjustments.length ();
3065 bool one = false;
3066
3067 for (i = 0; i < len; i++)
3068 {
3069 struct ipa_parm_adjustment *adj;
3070 adj = &adjustments[i];
3071
3072 if (adj->base_index == base_index)
3073 {
3074 if (one)
3075 return true;
3076 else
3077 one = true;
3078 }
3079 }
3080 return false;
3081 }
3082
3083
3084 /* Return adjustments that should have the same effect on function parameters
3085 and call arguments as if they were first changed according to adjustments in
3086 INNER and then by adjustments in OUTER. */
3087
3088 ipa_parm_adjustment_vec
3089 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3090 ipa_parm_adjustment_vec outer)
3091 {
3092 int i, outlen = outer.length ();
3093 int inlen = inner.length ();
3094 int removals = 0;
3095 ipa_parm_adjustment_vec adjustments, tmp;
3096
3097 tmp.create (inlen);
3098 for (i = 0; i < inlen; i++)
3099 {
3100 struct ipa_parm_adjustment *n;
3101 n = &inner[i];
3102
3103 if (n->remove_param)
3104 removals++;
3105 else
3106 tmp.quick_push (*n);
3107 }
3108
3109 adjustments.create (outlen + removals);
3110 for (i = 0; i < outlen; i++)
3111 {
3112 struct ipa_parm_adjustment r;
3113 struct ipa_parm_adjustment *out = &outer[i];
3114 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3115
3116 memset (&r, 0, sizeof (r));
3117 gcc_assert (!in->remove_param);
3118 if (out->remove_param)
3119 {
3120 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3121 {
3122 r.remove_param = true;
3123 adjustments.quick_push (r);
3124 }
3125 continue;
3126 }
3127
3128 r.base_index = in->base_index;
3129 r.type = out->type;
3130
3131 /* FIXME: Create nonlocal value too. */
3132
3133 if (in->copy_param && out->copy_param)
3134 r.copy_param = true;
3135 else if (in->copy_param)
3136 r.offset = out->offset;
3137 else if (out->copy_param)
3138 r.offset = in->offset;
3139 else
3140 r.offset = in->offset + out->offset;
3141 adjustments.quick_push (r);
3142 }
3143
3144 for (i = 0; i < inlen; i++)
3145 {
3146 struct ipa_parm_adjustment *n = &inner[i];
3147
3148 if (n->remove_param)
3149 adjustments.quick_push (*n);
3150 }
3151
3152 tmp.release ();
3153 return adjustments;
3154 }
3155
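/* A worked example (hypothetical vectors; offsets are in bits).  Let INNER
   replace the first parameter by its field at offset 32 and drop the second
   parameter, and let OUTER simply copy the one parameter that is left:

	INNER:	{ base_index 0, offset 32 }, { base_index 1, remove_param }
	OUTER:	{ base_index 0, copy_param }

   The combination maps the final parameter straight to base index 0 of the
   original declaration with offset 32 and re-appends INNER's removal, so the
   result still accounts for the dropped parameter:

	RESULT:	{ base_index 0, offset 32 }, { base_index 1, remove_param }  */
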
3156 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
3157 human-friendly way, assuming they are meant to be applied to FNDECL. */
3158
3159 void
3160 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3161 tree fndecl)
3162 {
3163 int i, len = adjustments.length ();
3164 bool first = true;
3165 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3166
3167 fprintf (file, "IPA param adjustments: ");
3168 for (i = 0; i < len; i++)
3169 {
3170 struct ipa_parm_adjustment *adj;
3171 adj = &adjustments[i];
3172
3173 if (!first)
3174 fprintf (file, " ");
3175 else
3176 first = false;
3177
3178 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3179 print_generic_expr (file, parms[adj->base_index], 0);
3180 if (adj->base)
3181 {
3182 fprintf (file, ", base: ");
3183 print_generic_expr (file, adj->base, 0);
3184 }
3185 if (adj->reduction)
3186 {
3187 fprintf (file, ", reduction: ");
3188 print_generic_expr (file, adj->reduction, 0);
3189 }
3190 if (adj->new_ssa_base)
3191 {
3192 fprintf (file, ", new_ssa_base: ");
3193 print_generic_expr (file, adj->new_ssa_base, 0);
3194 }
3195
3196 if (adj->copy_param)
3197 fprintf (file, ", copy_param");
3198 else if (adj->remove_param)
3199 fprintf (file, ", remove_param");
3200 else
3201 fprintf (file, ", offset %li", (long) adj->offset);
3202 if (adj->by_ref)
3203 fprintf (file, ", by_ref");
3204 print_node_brief (file, ", type: ", adj->type, 0);
3205 fprintf (file, "\n");
3206 }
3207 parms.release ();
3208 }
3209
3210 /* Dump the linked list of aggregate replacement values AV to F. */
3211
3212 void
3213 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3214 {
3215 bool comma = false;
3216 fprintf (f, " Aggregate replacements:");
3217 for (; av; av = av->next)
3218 {
3219 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3220 av->index, av->offset);
3221 print_generic_expr (f, av->value, 0);
3222 comma = true;
3223 }
3224 fprintf (f, "\n");
3225 }
3226
3227 /* Stream out jump function JUMP_FUNC to OB. */
3228
3229 static void
3230 ipa_write_jump_function (struct output_block *ob,
3231 struct ipa_jump_func *jump_func)
3232 {
3233 struct ipa_agg_jf_item *item;
3234 struct bitpack_d bp;
3235 int i, count;
3236
3237 streamer_write_uhwi (ob, jump_func->type);
3238 switch (jump_func->type)
3239 {
3240 case IPA_JF_UNKNOWN:
3241 break;
3242 case IPA_JF_KNOWN_TYPE:
3243 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3244 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3245 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3246 break;
3247 case IPA_JF_CONST:
3248 gcc_assert (
3249 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3250 stream_write_tree (ob, jump_func->value.constant, true);
3251 break;
3252 case IPA_JF_PASS_THROUGH:
3253 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3254 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3255 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3256 bp = bitpack_create (ob->main_stream);
3257 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3258 streamer_write_bitpack (&bp);
3259 break;
3260 case IPA_JF_ANCESTOR:
3261 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3262 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3263 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3264 bp = bitpack_create (ob->main_stream);
3265 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3266 streamer_write_bitpack (&bp);
3267 break;
3268 }
3269
3270 count = vec_safe_length (jump_func->agg.items);
3271 streamer_write_uhwi (ob, count);
3272 if (count)
3273 {
3274 bp = bitpack_create (ob->main_stream);
3275 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3276 streamer_write_bitpack (&bp);
3277 }
3278
3279 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3280 {
3281 streamer_write_uhwi (ob, item->offset);
3282 stream_write_tree (ob, item->value, true);
3283 }
3284 }
3285
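/* For orientation (an informal summary derived from the code above): the
   streamed record is

	uhwi	jump function type
	...	type-specific payload (trees via stream_write_tree, the
		agg_preserved flags as one-bit bitpacks)
	uhwi	number of aggregate items
	bp	by_ref			(present only when the count is nonzero)
	count x { uhwi offset; tree value; }

   ipa_read_jump_function below must be kept in exact sync with this
   layout.  */
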
3286 /* Read in jump function JUMP_FUNC from IB. */
3287
3288 static void
3289 ipa_read_jump_function (struct lto_input_block *ib,
3290 struct ipa_jump_func *jump_func,
3291 struct data_in *data_in)
3292 {
3293 struct bitpack_d bp;
3294 int i, count;
3295
3296 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3297 switch (jump_func->type)
3298 {
3299 case IPA_JF_UNKNOWN:
3300 break;
3301 case IPA_JF_KNOWN_TYPE:
3302 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3303 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3304 jump_func->value.known_type.component_type = stream_read_tree (ib,
3305 data_in);
3306 break;
3307 case IPA_JF_CONST:
3308 jump_func->value.constant = stream_read_tree (ib, data_in);
3309 break;
3310 case IPA_JF_PASS_THROUGH:
3311 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3312 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3313 jump_func->value.pass_through.operation
3314 = (enum tree_code) streamer_read_uhwi (ib);
3315 bp = streamer_read_bitpack (ib);
3316 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3317 break;
3318 case IPA_JF_ANCESTOR:
3319 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3320 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3321 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3322 bp = streamer_read_bitpack (ib);
3323 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3324 break;
3325 }
3326
3327 count = streamer_read_uhwi (ib);
3328 vec_alloc (jump_func->agg.items, count);
3329 if (count)
3330 {
3331 bp = streamer_read_bitpack (ib);
3332 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3333 }
3334 for (i = 0; i < count; i++)
3335 {
3336 struct ipa_agg_jf_item item;
3337 item.offset = streamer_read_uhwi (ib);
3338 item.value = stream_read_tree (ib, data_in);
3339 jump_func->agg.items->quick_push (item);
3340 }
3341 }
3342
3343 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3344 relevant to indirect inlining to OB. */
3345
3346 static void
3347 ipa_write_indirect_edge_info (struct output_block *ob,
3348 struct cgraph_edge *cs)
3349 {
3350 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3351 struct bitpack_d bp;
3352
3353 streamer_write_hwi (ob, ii->param_index);
3354 streamer_write_hwi (ob, ii->offset);
3355 bp = bitpack_create (ob->main_stream);
3356 bp_pack_value (&bp, ii->polymorphic, 1);
3357 bp_pack_value (&bp, ii->agg_contents, 1);
3358 bp_pack_value (&bp, ii->by_ref, 1);
3359 streamer_write_bitpack (&bp);
3360
3361 if (ii->polymorphic)
3362 {
3363 streamer_write_hwi (ob, ii->otr_token);
3364 stream_write_tree (ob, ii->otr_type, true);
3365 }
3366 }
3367
3368 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3369 relevant to indirect inlining from IB. */
3370
3371 static void
3372 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3373 struct data_in *data_in ATTRIBUTE_UNUSED,
3374 struct cgraph_edge *cs)
3375 {
3376 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3377 struct bitpack_d bp;
3378
3379 ii->param_index = (int) streamer_read_hwi (ib);
3380 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3381 bp = streamer_read_bitpack (ib);
3382 ii->polymorphic = bp_unpack_value (&bp, 1);
3383 ii->agg_contents = bp_unpack_value (&bp, 1);
3384 ii->by_ref = bp_unpack_value (&bp, 1);
3385 if (ii->polymorphic)
3386 {
3387 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3388 ii->otr_type = stream_read_tree (ib, data_in);
3389 }
3390 }
3391
3392 /* Stream out NODE info to OB. */
3393
3394 static void
3395 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3396 {
3397 int node_ref;
3398 lto_symtab_encoder_t encoder;
3399 struct ipa_node_params *info = IPA_NODE_REF (node);
3400 int j;
3401 struct cgraph_edge *e;
3402 struct bitpack_d bp;
3403
3404 encoder = ob->decl_state->symtab_node_encoder;
3405 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3406 streamer_write_uhwi (ob, node_ref);
3407
3408 bp = bitpack_create (ob->main_stream);
3409 gcc_assert (info->uses_analysis_done
3410 || ipa_get_param_count (info) == 0);
3411 gcc_assert (!info->node_enqueued);
3412 gcc_assert (!info->ipcp_orig_node);
3413 for (j = 0; j < ipa_get_param_count (info); j++)
3414 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3415 streamer_write_bitpack (&bp);
3416 for (e = node->callees; e; e = e->next_callee)
3417 {
3418 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3419
3420 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3421 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3422 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3423 }
3424 for (e = node->indirect_calls; e; e = e->next_callee)
3425 {
3426 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3427
3428 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3429 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3430 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3431 ipa_write_indirect_edge_info (ob, e);
3432 }
3433 }
3434
3435 /* Stream in NODE info from IB. */
3436
3437 static void
3438 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3439 struct data_in *data_in)
3440 {
3441 struct ipa_node_params *info = IPA_NODE_REF (node);
3442 int k;
3443 struct cgraph_edge *e;
3444 struct bitpack_d bp;
3445
3446 ipa_initialize_node_params (node);
3447
3448 bp = streamer_read_bitpack (ib);
3449 if (ipa_get_param_count (info) != 0)
3450 info->uses_analysis_done = true;
3451 info->node_enqueued = false;
3452 for (k = 0; k < ipa_get_param_count (info); k++)
3453 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3454 for (e = node->callees; e; e = e->next_callee)
3455 {
3456 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3457 int count = streamer_read_uhwi (ib);
3458
3459 if (!count)
3460 continue;
3461 vec_safe_grow_cleared (args->jump_functions, count);
3462
3463 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3464 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3465 }
3466 for (e = node->indirect_calls; e; e = e->next_callee)
3467 {
3468 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3469 int count = streamer_read_uhwi (ib);
3470
3471 if (count)
3472 {
3473 vec_safe_grow_cleared (args->jump_functions, count);
3474 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3475 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3476 data_in);
3477 }
3478 ipa_read_indirect_edge_info (ib, data_in, e);
3479 }
3480 }
3481
3482 /* Write jump functions of all functions in the current LTO partition. */
3483
3484 void
3485 ipa_prop_write_jump_functions (void)
3486 {
3487 struct cgraph_node *node;
3488 struct output_block *ob;
3489 unsigned int count = 0;
3490 lto_symtab_encoder_iterator lsei;
3491 lto_symtab_encoder_t encoder;
3492
3493
3494 if (!ipa_node_params_vector.exists ())
3495 return;
3496
3497 ob = create_output_block (LTO_section_jump_functions);
3498 encoder = ob->decl_state->symtab_node_encoder;
3499 ob->cgraph_node = NULL;
3500 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3501 lsei_next_function_in_partition (&lsei))
3502 {
3503 node = lsei_cgraph_node (lsei);
3504 if (cgraph_function_with_gimple_body_p (node)
3505 && IPA_NODE_REF (node) != NULL)
3506 count++;
3507 }
3508
3509 streamer_write_uhwi (ob, count);
3510
3511 /* Process all of the functions. */
3512 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3513 lsei_next_function_in_partition (&lsei))
3514 {
3515 node = lsei_cgraph_node (lsei);
3516 if (cgraph_function_with_gimple_body_p (node)
3517 && IPA_NODE_REF (node) != NULL)
3518 ipa_write_node_info (ob, node);
3519 }
3520 streamer_write_char_stream (ob->main_stream, 0);
3521 produce_asm (ob, NULL);
3522 destroy_output_block (ob);
3523 }
3524
3525 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
3526
3527 static void
3528 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3529 size_t len)
3530 {
3531 const struct lto_function_header *header =
3532 (const struct lto_function_header *) data;
3533 const int cfg_offset = sizeof (struct lto_function_header);
3534 const int main_offset = cfg_offset + header->cfg_size;
3535 const int string_offset = main_offset + header->main_size;
3536 struct data_in *data_in;
3537 struct lto_input_block ib_main;
3538 unsigned int i;
3539 unsigned int count;
3540
3541 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3542 header->main_size);
3543
3544 data_in =
3545 lto_data_in_create (file_data, (const char *) data + string_offset,
3546 header->string_size, vNULL);
3547 count = streamer_read_uhwi (&ib_main);
3548
3549 for (i = 0; i < count; i++)
3550 {
3551 unsigned int index;
3552 struct cgraph_node *node;
3553 lto_symtab_encoder_t encoder;
3554
3555 index = streamer_read_uhwi (&ib_main);
3556 encoder = file_data->symtab_node_encoder;
3557 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3558 gcc_assert (node->analyzed);
3559 ipa_read_node_info (&ib_main, node, data_in);
3560 }
3561 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3562 len);
3563 lto_data_in_delete (data_in);
3564 }
3565
3566 /* Read ipcp jump functions. */
3567
3568 void
3569 ipa_prop_read_jump_functions (void)
3570 {
3571 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3572 struct lto_file_decl_data *file_data;
3573 unsigned int j = 0;
3574
3575 ipa_check_create_node_params ();
3576 ipa_check_create_edge_args ();
3577 ipa_register_cgraph_hooks ();
3578
3579 while ((file_data = file_data_vec[j++]))
3580 {
3581 size_t len;
3582 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3583
3584 if (data)
3585 ipa_prop_read_section (file_data, data, len);
3586 }
3587 }
3588
3589 /* After merging units, we can get mismatches in argument counts.
3590 Decl merging might also have rendered parameter lists obsolete.
3591 Compute called_with_variable_arg info as well. */
3592
3593 void
3594 ipa_update_after_lto_read (void)
3595 {
3596 struct cgraph_node *node;
3597
3598 ipa_check_create_node_params ();
3599 ipa_check_create_edge_args ();
3600
3601 FOR_EACH_DEFINED_FUNCTION (node)
3602 if (node->analyzed)
3603 ipa_initialize_node_params (node);
3604 }
3605
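/* Stream out the aggregate value replacement chain for NODE to OB. */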
3606 void
3607 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
3608 {
3609 int node_ref;
3610 unsigned int count = 0;
3611 lto_symtab_encoder_t encoder;
3612 struct ipa_agg_replacement_value *aggvals, *av;
3613
3614 aggvals = ipa_get_agg_replacements_for_node (node);
3615 encoder = ob->decl_state->symtab_node_encoder;
3616 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3617 streamer_write_uhwi (ob, node_ref);
3618
3619 for (av = aggvals; av; av = av->next)
3620 count++;
3621 streamer_write_uhwi (ob, count);
3622
3623 for (av = aggvals; av; av = av->next)
3624 {
3625 streamer_write_uhwi (ob, av->offset);
3626 streamer_write_uhwi (ob, av->index);
3627 stream_write_tree (ob, av->value, true);
3628 }
3629 }
3630
3631 /* Stream in the aggregate value replacement chain for NODE from IB. */
3632
3633 static void
3634 read_agg_replacement_chain (struct lto_input_block *ib,
3635 struct cgraph_node *node,
3636 struct data_in *data_in)
3637 {
3638 struct ipa_agg_replacement_value *aggvals = NULL;
3639 unsigned int count, i;
3640
3641 count = streamer_read_uhwi (ib);
3642 for (i = 0; i < count; i++)
3643 {
3644 struct ipa_agg_replacement_value *av;
3645
3646 av = ggc_alloc_ipa_agg_replacement_value ();
3647 av->offset = streamer_read_uhwi (ib);
3648 av->index = streamer_read_uhwi (ib);
3649 av->value = stream_read_tree (ib, data_in);
3650 av->next = aggvals;
3651 aggvals = av;
3652 }
3653 ipa_set_node_agg_value_chain (node, aggvals);
3654 }
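/* Note that because read_agg_replacement_chain prepends each value, the
   chain is reconstructed in the reverse of the order in which it was
   written.  That is harmless here: adjust_agg_replacement_values and
   ipcp_transform_function below walk the whole list and do not depend on
   its order.  */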
3655
3656 /* Write all aggregate replacements for nodes in the current partition. */
3657
3658 void
3659 ipa_prop_write_all_agg_replacement (void)
3660 {
3661 struct cgraph_node *node;
3662 struct output_block *ob;
3663 unsigned int count = 0;
3664 lto_symtab_encoder_iterator lsei;
3665 lto_symtab_encoder_t encoder;
3666
3667 if (!ipa_node_agg_replacements)
3668 return;
3669
3670 ob = create_output_block (LTO_section_ipcp_transform);
3671 encoder = ob->decl_state->symtab_node_encoder;
3672 ob->cgraph_node = NULL;
3673 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3674 lsei_next_function_in_partition (&lsei))
3675 {
3676 node = lsei_cgraph_node (lsei);
3677 if (cgraph_function_with_gimple_body_p (node)
3678 && ipa_get_agg_replacements_for_node (node) != NULL)
3679 count++;
3680 }
3681
3682 streamer_write_uhwi (ob, count);
3683
3684 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3685 lsei_next_function_in_partition (&lsei))
3686 {
3687 node = lsei_cgraph_node (lsei);
3688 if (cgraph_function_with_gimple_body_p (node)
3689 && ipa_get_agg_replacements_for_node (node) != NULL)
3690 write_agg_replacement_chain (ob, node);
3691 }
3692 streamer_write_char_stream (ob->main_stream, 0);
3693 produce_asm (ob, NULL);
3694 destroy_output_block (ob);
3695 }
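/* Like the jump function writer, the function above makes two passes with
   the same loop over the partition: the first only counts nodes with
   replacements, because the reader expects the count before the records;
   the second streams the actual chains.  */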
3696
3697 /* Read replacements section in file FILE_DATA of length LEN with data
3698 DATA. */
3699
3700 static void
3701 read_replacements_section (struct lto_file_decl_data *file_data,
3702 const char *data,
3703 size_t len)
3704 {
3705 const struct lto_function_header *header =
3706 (const struct lto_function_header *) data;
3707 const int cfg_offset = sizeof (struct lto_function_header);
3708 const int main_offset = cfg_offset + header->cfg_size;
3709 const int string_offset = main_offset + header->main_size;
3710 struct data_in *data_in;
3711 struct lto_input_block ib_main;
3712 unsigned int i;
3713 unsigned int count;
3714
3715 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3716 header->main_size);
3717
3718 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
3719 header->string_size, vNULL);
3720 count = streamer_read_uhwi (&ib_main);
3721
3722 for (i = 0; i < count; i++)
3723 {
3724 unsigned int index;
3725 struct cgraph_node *node;
3726 lto_symtab_encoder_t encoder;
3727
3728 index = streamer_read_uhwi (&ib_main);
3729 encoder = file_data->symtab_node_encoder;
3730 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3731 gcc_assert (node->analyzed);
3732 read_agg_replacement_chain (&ib_main, node, data_in);
3733 }
3734 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
3735 len);
3736 lto_data_in_delete (data_in);
3737 }
3738
3739 /* Read IPA-CP aggregate replacements. */
3740
3741 void
3742 ipa_prop_read_all_agg_replacement (void)
3743 {
3744 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3745 struct lto_file_decl_data *file_data;
3746 unsigned int j = 0;
3747
3748 while ((file_data = file_data_vec[j++]))
3749 {
3750 size_t len;
3751 const char *data = lto_get_section_data (file_data,
3752 LTO_section_ipcp_transform,
3753 NULL, &len);
3754 if (data)
3755 read_replacements_section (file_data, data, len);
3756 }
3757 }
3758
3759 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
3760 NODE. */
3761
3762 static void
3763 adjust_agg_replacement_values (struct cgraph_node *node,
3764 struct ipa_agg_replacement_value *aggval)
3765 {
3766 struct ipa_agg_replacement_value *v;
3767 int i, c = 0, d = 0, *adj;
3768
3769 if (!node->clone.combined_args_to_skip)
3770 return;
3771
3772 for (v = aggval; v; v = v->next)
3773 {
3774 gcc_assert (v->index >= 0);
3775 if (c < v->index)
3776 c = v->index;
3777 }
3778 c++;
3779
3780 adj = XALLOCAVEC (int, c);
3781 for (i = 0; i < c; i++)
3782 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
3783 {
3784 adj[i] = -1;
3785 d++;
3786 }
3787 else
3788 adj[i] = i - d;
3789
3790 for (v = aggval; v; v = v->next)
3791 v->index = adj[v->index];
3792 }
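/* For example, if combined_args_to_skip = {1} and AGGVAL holds values for
   parameter indices 0, 2 and 3, then c = 4 and the adjustment vector is

     adj = { 0, -1, 1, 2 }

   so the surviving values are renumbered 0, 1 and 2 to match the clone's
   shortened parameter list.  */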
3793
3794
3795 /* Function body transformation phase: replace loads from aggregates passed to NODE with the constant values IPA-CP discovered for them. */
3796
3797 unsigned int
3798 ipcp_transform_function (struct cgraph_node *node)
3799 {
3800 vec<ipa_param_descriptor_t> descriptors = vNULL;
3801 struct param_analysis_info *parms_ainfo;
3802 struct ipa_agg_replacement_value *aggval;
3803 gimple_stmt_iterator gsi;
3804 basic_block bb;
3805 int param_count;
3806 bool cfg_changed = false, something_changed = false;
3807
3808 gcc_checking_assert (cfun);
3809 gcc_checking_assert (current_function_decl);
3810
3811 if (dump_file)
3812 fprintf (dump_file, "Modification phase of node %s/%i\n",
3813 cgraph_node_name (node), node->uid);
3814
3815 aggval = ipa_get_agg_replacements_for_node (node);
3816 if (!aggval)
3817 return 0;
3818 param_count = count_formal_params (node->symbol.decl);
3819 if (param_count == 0)
3820 return 0;
3821 adjust_agg_replacement_values (node, aggval);
3822 if (dump_file)
3823 ipa_dump_agg_replacement_values (dump_file, aggval);
3824 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
3825 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
3826 descriptors.safe_grow_cleared (param_count);
3827 ipa_populate_param_decls (node, descriptors);
3828
3829 FOR_EACH_BB (bb)
3830 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3831 {
3832 struct ipa_agg_replacement_value *v;
3833 gimple stmt = gsi_stmt (gsi);
3834 tree rhs, val, t;
3835 HOST_WIDE_INT offset;
3836 int index;
3837 bool by_ref, vce;
3838
3839 if (!gimple_assign_load_p (stmt))
3840 continue;
3841 rhs = gimple_assign_rhs1 (stmt);
3842 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
3843 continue;
3844
3845 vce = false;
3846 t = rhs;
3847 while (handled_component_p (t))
3848 {
3849 /* V_C_E can do things like convert an array of integers to one
3850 bigger integer and similar things we do not handle below. */
3851 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
3852 {
3853 vce = true;
3854 break;
3855 }
3856 t = TREE_OPERAND (t, 0);
3857 }
3858 if (vce)
3859 continue;
3860
3861 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3862 rhs, &index, &offset, &by_ref))
3863 continue;
3864 for (v = aggval; v; v = v->next)
3865 if (v->index == index
3866 && v->offset == offset)
3867 break;
3868 if (!v)
3869 continue;
3870
3871 gcc_checking_assert (is_gimple_ip_invariant (v->value));
3872 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
3873 {
3874 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
3875 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
3876 else if (TYPE_SIZE (TREE_TYPE (rhs))
3877 == TYPE_SIZE (TREE_TYPE (v->value)))
3878 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
3879 else
3880 {
3881 if (dump_file)
3882 {
3883 fprintf (dump_file, " const ");
3884 print_generic_expr (dump_file, v->value, 0);
3885 fprintf (dump_file, " can't be converted to type of ");
3886 print_generic_expr (dump_file, rhs, 0);
3887 fprintf (dump_file, "\n");
3888 }
3889 continue;
3890 }
3891 }
3892 else
3893 val = v->value;
3894
3895 if (dump_file && (dump_flags & TDF_DETAILS))
3896 {
3897 fprintf (dump_file, "Modifying stmt:\n ");
3898 print_gimple_stmt (dump_file, stmt, 0, 0);
3899 }
3900 gimple_assign_set_rhs_from_tree (&gsi, val);
3901 update_stmt (stmt);
3902
3903 if (dump_file && (dump_flags & TDF_DETAILS))
3904 {
3905 fprintf (dump_file, "into:\n ");
3906 print_gimple_stmt (dump_file, stmt, 0, 0);
3907 fprintf (dump_file, "\n");
3908 }
3909
3910 something_changed = true;
3911 if (maybe_clean_eh_stmt (stmt)
3912 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3913 cfg_changed = true;
3914 }
3915
3916 (*ipa_node_agg_replacements)[node->uid] = NULL;
3917 free_parms_ainfo (parms_ainfo, param_count);
3918 descriptors.release ();
3919
3920 if (!something_changed)
3921 return 0;
3922 else if (cfg_changed)
3923 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
3924 else
3925 return TODO_update_ssa_only_virtuals;
3926 }
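/* As an illustration of the transformation above (hypothetical user code,
   not part of GCC), given

     struct opts { int level; };
     static int f (struct opts *o) { return o->level + 1; }

   a clone of f whose callers all pass o->level == 2 carries the aggregate
   replacement (index 0, offset of `level', value 2), and the loop above
   rewrites the load of o->level into the constant 2, as if the body read

     return 2 + 1;  */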