Introduce vNULL to use as a nil initializer for vec<>.
[gcc.git] / gcc / ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "ipa-inline.h"
34 #include "gimple.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "lto-streamer.h"
39 #include "data-streamer.h"
40 #include "tree-streamer.h"
41 #include "params.h"
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
45
46 struct param_analysis_info
47 {
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
50 };
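/* Roughly speaking, an array of these structures, one element per formal
   parameter, is what the analysis routines below pass around as PARMS_AINFO.
   The *_modified flags and the visited-statement bitmaps cache the results
   of walk_aliased_vdefs walks (see parm_preserved_before_stmt_p and
   parm_ref_data_pass_through_p) so that repeated queries about the same
   parameter stay cheap.  */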
51
52 /* Vector where the parameter infos are actually stored. */
53 vec<ipa_node_params_t> ipa_node_params_vector;
54 /* Vector of known aggregate values in cloned nodes. */
55 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
56 /* Vector where the edge argument infos are actually stored. */
57 vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
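/* A rough sketch of how these vectors are accessed (see ipa-prop.h for the
   authoritative definitions): the IPA_NODE_REF and IPA_EDGE_REF accessors
   used throughout this file index them by the uid of the call graph node or
   edge, approximately

     IPA_NODE_REF (node)  ->  &ipa_node_params_vector[node->uid]
     IPA_EDGE_REF (cs)    ->  &(*ipa_edge_args_vector)[cs->uid]
*/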
58
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
65
66 /* Return index of the formal whose tree is PTREE in the function whose
67 parameters are described by DESCRIPTORS. */
68
69 static int
70 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
71 {
72 int i, count;
73
74 count = descriptors.length ();
75 for (i = 0; i < count; i++)
76 if (descriptors[i].decl == ptree)
77 return i;
78
79 return -1;
80 }
81
82 /* Return index of the formal whose tree is PTREE in function which corresponds
83 to INFO. */
84
85 int
86 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
87 {
88 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
89 }
90
91 /* Populate the decl fields in parameter DESCRIPTORS that correspond to the
92 formal parameters of NODE. */
93
94 static void
95 ipa_populate_param_decls (struct cgraph_node *node,
96 vec<ipa_param_descriptor_t> &descriptors)
97 {
98 tree fndecl;
99 tree fnargs;
100 tree parm;
101 int param_num;
102
103 fndecl = node->symbol.decl;
104 fnargs = DECL_ARGUMENTS (fndecl);
105 param_num = 0;
106 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
107 {
108 descriptors[param_num].decl = parm;
109 param_num++;
110 }
111 }
112
113 /* Return how many formal parameters FNDECL has. */
114
115 static inline int
116 count_formal_params (tree fndecl)
117 {
118 tree parm;
119 int count = 0;
120
121 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
122 count++;
123
124 return count;
125 }
126
127 /* Initialize the ipa_node_params structure associated with NODE by counting
128 the function parameters, creating the descriptors and populating their
129 param_decls. */
130
131 void
132 ipa_initialize_node_params (struct cgraph_node *node)
133 {
134 struct ipa_node_params *info = IPA_NODE_REF (node);
135
136 if (!info->descriptors.exists ())
137 {
138 int param_count;
139
140 param_count = count_formal_params (node->symbol.decl);
141 if (param_count)
142 {
143 info->descriptors.safe_grow_cleared (param_count);
144 ipa_populate_param_decls (node, info->descriptors);
145 }
146 }
147 }
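/* A minimal usage sketch (assuming the ipa_node_params summary of NODE has
   already been allocated):

     ipa_initialize_node_params (node);
     i = ipa_get_param_decl_index (IPA_NODE_REF (node), parm_decl);

   where a negative I means PARM_DECL is not a formal parameter of the
   function.  */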
148
149 /* Print the jump functions associated with call graph edge CS to file F. */
150
151 static void
152 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
153 {
154 int i, count;
155
156 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
157 for (i = 0; i < count; i++)
158 {
159 struct ipa_jump_func *jump_func;
160 enum jump_func_type type;
161
162 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
163 type = jump_func->type;
164
165 fprintf (f, " param %d: ", i);
166 if (type == IPA_JF_UNKNOWN)
167 fprintf (f, "UNKNOWN\n");
168 else if (type == IPA_JF_KNOWN_TYPE)
169 {
170 fprintf (f, "KNOWN TYPE: base ");
171 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
172 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
173 jump_func->value.known_type.offset);
174 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
175 fprintf (f, "\n");
176 }
177 else if (type == IPA_JF_CONST)
178 {
179 tree val = jump_func->value.constant;
180 fprintf (f, "CONST: ");
181 print_generic_expr (f, val, 0);
182 if (TREE_CODE (val) == ADDR_EXPR
183 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
184 {
185 fprintf (f, " -> ");
186 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
187 0);
188 }
189 fprintf (f, "\n");
190 }
191 else if (type == IPA_JF_PASS_THROUGH)
192 {
193 fprintf (f, "PASS THROUGH: ");
194 fprintf (f, "%d, op %s",
195 jump_func->value.pass_through.formal_id,
196 tree_code_name[(int)
197 jump_func->value.pass_through.operation]);
198 if (jump_func->value.pass_through.operation != NOP_EXPR)
199 {
200 fprintf (f, " ");
201 print_generic_expr (f,
202 jump_func->value.pass_through.operand, 0);
203 }
204 if (jump_func->value.pass_through.agg_preserved)
205 fprintf (f, ", agg_preserved");
206 fprintf (f, "\n");
207 }
208 else if (type == IPA_JF_ANCESTOR)
209 {
210 fprintf (f, "ANCESTOR: ");
211 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
212 jump_func->value.ancestor.formal_id,
213 jump_func->value.ancestor.offset);
214 print_generic_expr (f, jump_func->value.ancestor.type, 0);
215 if (jump_func->value.ancestor.agg_preserved)
216 fprintf (f, ", agg_preserved");
217 fprintf (f, "\n");
218 }
219
220 if (jump_func->agg.items)
221 {
222 struct ipa_agg_jf_item *item;
223 int j;
224
225 fprintf (f, " Aggregate passed by %s:\n",
226 jump_func->agg.by_ref ? "reference" : "value");
227 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
228 {
229 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
230 item->offset);
231 if (TYPE_P (item->value))
232 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
233 tree_low_cst (TYPE_SIZE (item->value), 1));
234 else
235 {
236 fprintf (f, "cst: ");
237 print_generic_expr (f, item->value, 0);
238 }
239 fprintf (f, "\n");
240 }
241 }
242 }
243 }
244
245
246 /* Print the jump functions of all arguments on all call graph edges going from
247 NODE to file F. */
248
249 void
250 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
251 {
252 struct cgraph_edge *cs;
253 int i;
254
255 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
256 for (cs = node->callees; cs; cs = cs->next_callee)
257 {
258 if (!ipa_edge_args_info_available_for_edge_p (cs))
259 continue;
260
261 fprintf (f, " callsite %s/%i -> %s/%i : \n",
262 xstrdup (cgraph_node_name (node)), node->uid,
263 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
264 ipa_print_node_jump_functions_for_edge (f, cs);
265 }
266
267 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
268 {
269 if (!ipa_edge_args_info_available_for_edge_p (cs))
270 continue;
271
272 if (cs->call_stmt)
273 {
274 fprintf (f, " indirect callsite %d for stmt ", i);
275 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
276 }
277 else
278 fprintf (f, " indirect callsite %d :\n", i);
279 ipa_print_node_jump_functions_for_edge (f, cs);
280
281 }
282 }
283
284 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
285
286 void
287 ipa_print_all_jump_functions (FILE *f)
288 {
289 struct cgraph_node *node;
290
291 fprintf (f, "\nJump functions:\n");
292 FOR_EACH_FUNCTION (node)
293 {
294 ipa_print_node_jump_functions (f, node);
295 }
296 }
297
298 /* Worker for prune_expression_for_jf. */
299
300 static tree
301 prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
302 {
303 if (EXPR_P (*tp))
304 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
305 else
306 *walk_subtrees = 0;
307 return NULL_TREE;
308 }
309
310 /* Return the expression tree EXPR unshared and with location stripped off. */
311
312 static tree
313 prune_expression_for_jf (tree exp)
314 {
315 if (EXPR_P (exp))
316 {
317 exp = unshare_expr (exp);
318 walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
319 }
320 return exp;
321 }
322
323 /* Set JFUNC to be a known type jump function. */
324
325 static void
326 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
327 tree base_type, tree component_type)
328 {
329 jfunc->type = IPA_JF_KNOWN_TYPE;
330 jfunc->value.known_type.offset = offset;
331 jfunc->value.known_type.base_type = base_type;
332 jfunc->value.known_type.component_type = component_type;
333 }
334
335 /* Set JFUNC to be a constant jump function. */
336
337 static void
338 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
339 {
340 constant = unshare_expr (constant);
341 if (constant && EXPR_P (constant))
342 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
343 jfunc->type = IPA_JF_CONST;
344 jfunc->value.constant = prune_expression_for_jf (constant);
345 }
346
347 /* Set JFUNC to be a simple pass-through jump function. */
348 static void
349 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
350 bool agg_preserved)
351 {
352 jfunc->type = IPA_JF_PASS_THROUGH;
353 jfunc->value.pass_through.operand = NULL_TREE;
354 jfunc->value.pass_through.formal_id = formal_id;
355 jfunc->value.pass_through.operation = NOP_EXPR;
356 jfunc->value.pass_through.agg_preserved = agg_preserved;
357 }
358
359 /* Set JFUNC to be an arithmetic pass through jump function. */
360
361 static void
362 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
363 tree operand, enum tree_code operation)
364 {
365 jfunc->type = IPA_JF_PASS_THROUGH;
366 jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
367 jfunc->value.pass_through.formal_id = formal_id;
368 jfunc->value.pass_through.operation = operation;
369 jfunc->value.pass_through.agg_preserved = false;
370 }
371
372 /* Set JFUNC to be an ancestor jump function. */
373
374 static void
375 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
376 tree type, int formal_id, bool agg_preserved)
377 {
378 jfunc->type = IPA_JF_ANCESTOR;
379 jfunc->value.ancestor.formal_id = formal_id;
380 jfunc->value.ancestor.offset = offset;
381 jfunc->value.ancestor.type = type;
382 jfunc->value.ancestor.agg_preserved = agg_preserved;
383 }
384
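/* To give a rough idea (sketches only): ipa_set_jf_constant covers cases such
   as passing a literal, e.g. "bar (7);", while ipa_set_jf_known_type covers
   passing the address of an automatically allocated object whose dynamic type
   is known, e.g. "struct B b; bar (&b.some_component);", recording the offset
   of the passed component within the object together with the base and
   component types.  */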
385 /* Structure to be passed in between detect_type_change and
386 check_stmt_for_type_change. */
387
388 struct type_change_info
389 {
390 /* Offset into the object where there is the virtual method pointer we are
391 looking for. */
392 HOST_WIDE_INT offset;
393 /* The declaration or SSA_NAME pointer of the base that we are checking for
394 type change. */
395 tree object;
396 /* If we actually can tell the type that the object has changed to, it is
397 stored in this field. Otherwise it remains NULL_TREE. */
398 tree known_current_type;
399 /* Set to true if dynamic type change has been detected. */
400 bool type_maybe_changed;
401 /* Set to true if multiple types have been encountered. known_current_type
402 must be disregarded in that case. */
403 bool multiple_types_encountered;
404 };
405
406 /* Return true if STMT can modify a virtual method table pointer.
407
408 This function makes special assumptions about both constructors and
409 destructors which are all the functions that are allowed to alter the VMT
410 pointers. It assumes that destructors begin with assignment into all VMT
411 pointers and that constructors essentially look in the following way:
412
413 1) The very first thing they do is that they call constructors of ancestor
414 sub-objects that have them.
415
416 2) Then the VMT pointers of this object and all its ancestors are set to
417 new values corresponding to the type of the constructor.
418
419 3) Only afterwards, other stuff such as constructors of member sub-objects
420 and the code written by the user is run. Only this may include calling
421 virtual functions, directly or indirectly.
422
423 There is no way to call a constructor of an ancestor sub-object in any
424 other way.
425
426 This means that we do not have to care whether constructors get the correct
427 type information because they will always change it (in fact, if we define
428 the type to be given by the VMT pointer, it is undefined).
429
430 The most important fact to derive from the above is that if, for some
431 statement in section 3, we try to detect whether the dynamic type has
432 changed, we can safely ignore all calls as we examine the function body
433 backwards until we reach statements in section 2 because these calls cannot
434 be ancestor constructors or destructors (if the input is not bogus) and so
435 do not change the dynamic type (this holds true only for automatically
436 allocated objects but at the moment we devirtualize only these). We then
437 must detect that statements in section 2 change the dynamic type and can try
438 to derive the new type. That is enough and we can stop, we will never see
439 the calls into constructors of sub-objects in this code. Therefore we can
440 safely ignore all call statements that we traverse.
441 */
442
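/* For illustration only (a hand-written sketch, not an actual dump), the body
   of a constructor B::B of a class derived from A typically begins roughly
   like this in gimple:

     A::A (this_1(D));                          <- 1) ancestor constructors
     this_1(D)->_vptr.B = &_ZTV1B + 16;         <- 2) VMT pointer stores
     ... user code, member constructors ...     <- 3)

   so when walking backwards from a statement in part 3), calls can be ignored
   until the VMT stores of part 2) are reached.  */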
443 static bool
444 stmt_may_be_vtbl_ptr_store (gimple stmt)
445 {
446 if (is_gimple_call (stmt))
447 return false;
448 else if (is_gimple_assign (stmt))
449 {
450 tree lhs = gimple_assign_lhs (stmt);
451
452 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
453 {
454 if (flag_strict_aliasing
455 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
456 return false;
457
458 if (TREE_CODE (lhs) == COMPONENT_REF
459 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
460 return false;
461 /* In the future we might want to use get_base_ref_and_offset to find
462 if there is a field corresponding to the offset and if so, proceed
463 almost like if it was a component ref. */
464 }
465 }
466 return true;
467 }
468
469 /* If STMT can be proved to be an assignment to the virtual method table
470 pointer of the object described by TCI and the type associated with the new
471 table can be identified, return the type. Otherwise return NULL_TREE. */
472
473 static tree
474 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
475 {
476 HOST_WIDE_INT offset, size, max_size;
477 tree lhs, rhs, base;
478
479 if (!gimple_assign_single_p (stmt))
480 return NULL_TREE;
481
482 lhs = gimple_assign_lhs (stmt);
483 rhs = gimple_assign_rhs1 (stmt);
484 if (TREE_CODE (lhs) != COMPONENT_REF
485 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
486 || TREE_CODE (rhs) != ADDR_EXPR)
487 return NULL_TREE;
488 rhs = get_base_address (TREE_OPERAND (rhs, 0));
489 if (!rhs
490 || TREE_CODE (rhs) != VAR_DECL
491 || !DECL_VIRTUAL_P (rhs))
492 return NULL_TREE;
493
494 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
495 if (offset != tci->offset
496 || size != POINTER_SIZE
497 || max_size != POINTER_SIZE)
498 return NULL_TREE;
499 if (TREE_CODE (base) == MEM_REF)
500 {
501 if (TREE_CODE (tci->object) != MEM_REF
502 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
503 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
504 TREE_OPERAND (base, 1)))
505 return NULL_TREE;
506 }
507 else if (tci->object != base)
508 return NULL_TREE;
509
510 return DECL_CONTEXT (rhs);
511 }
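/* As a sketch (not an actual dump), the kind of statement recognized above
   looks roughly like

     MEM[(struct A *)p_1]._vptr.A = &_ZTV1B[2];

   where _ZTV1B is the virtual table of some class B; DECL_CONTEXT of that
   vtable VAR_DECL, i.e. the class B itself, is what gets returned as the new
   dynamic type.  */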
512
513 /* Callback of walk_aliased_vdefs and a helper function for
514 detect_type_change to check whether a particular statement may modify
515 the virtual table pointer, and if possible also determine the new type of
516 the (sub-)object. It stores its result into DATA, which points to a
517 type_change_info structure. */
518
519 static bool
520 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
521 {
522 gimple stmt = SSA_NAME_DEF_STMT (vdef);
523 struct type_change_info *tci = (struct type_change_info *) data;
524
525 if (stmt_may_be_vtbl_ptr_store (stmt))
526 {
527 tree type;
528 type = extr_type_from_vtbl_ptr_store (stmt, tci);
529 if (tci->type_maybe_changed
530 && type != tci->known_current_type)
531 tci->multiple_types_encountered = true;
532 tci->known_current_type = type;
533 tci->type_maybe_changed = true;
534 return true;
535 }
536 else
537 return false;
538 }
539
540
541
542 /* Like detect_type_change but with extra argument COMP_TYPE which will become
543 the component type part of the new JFUNC if a dynamic type change is detected
544 and the new base type is identified. */
545
546 static bool
547 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
548 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
549 {
550 struct type_change_info tci;
551 ao_ref ao;
552
553 gcc_checking_assert (DECL_P (arg)
554 || TREE_CODE (arg) == MEM_REF
555 || handled_component_p (arg));
556 /* Const calls cannot call virtual methods through VMT and so type changes do
557 not matter. */
558 if (!flag_devirtualize || !gimple_vuse (call))
559 return false;
560
561 ao_ref_init (&ao, arg);
562 ao.base = base;
563 ao.offset = offset;
564 ao.size = POINTER_SIZE;
565 ao.max_size = ao.size;
566
567 tci.offset = offset;
568 tci.object = get_base_address (arg);
569 tci.known_current_type = NULL_TREE;
570 tci.type_maybe_changed = false;
571 tci.multiple_types_encountered = false;
572
573 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
574 &tci, NULL);
575 if (!tci.type_maybe_changed)
576 return false;
577
578 if (!tci.known_current_type
579 || tci.multiple_types_encountered
580 || offset != 0)
581 jfunc->type = IPA_JF_UNKNOWN;
582 else
583 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
584
585 return true;
586 }
587
588 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
589 looking for assignments to its virtual table pointer. If it has, return true
590 and fill in the jump function JFUNC with relevant type information or set it
591 to unknown. ARG is the object itself (not a pointer to it, unless
592 dereferenced). BASE is the base of the memory access as returned by
593 get_ref_base_and_extent, as is the offset. */
594
595 static bool
596 detect_type_change (tree arg, tree base, gimple call,
597 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
598 {
599 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
600 }
601
602 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
603 SSA name (its dereference will become the base and the offset is assumed to
604 be zero). */
605
606 static bool
607 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
608 {
609 tree comp_type;
610
611 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
612 if (!flag_devirtualize
613 || !POINTER_TYPE_P (TREE_TYPE (arg))
614 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
615 return false;
616
617 comp_type = TREE_TYPE (TREE_TYPE (arg));
618 arg = build2 (MEM_REF, ptr_type_node, arg,
619 build_int_cst (ptr_type_node, 0));
620
621 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
622 }
623
624 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
625 boolean variable pointed to by DATA. */
626
627 static bool
628 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
629 void *data)
630 {
631 bool *b = (bool *) data;
632 *b = true;
633 return true;
634 }
635
636 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
637 a value known not to be modified in this function before reaching the
638 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
639 information about the parameter. */
640
641 static bool
642 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
643 gimple stmt, tree parm_load)
644 {
645 bool modified = false;
646 bitmap *visited_stmts;
647 ao_ref refd;
648
649 if (parm_ainfo && parm_ainfo->parm_modified)
650 return false;
651
652 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
653 ao_ref_init (&refd, parm_load);
654 /* We can cache visited statements only when parm_ainfo is available and when
655 we are looking at a naked load of the whole parameter. */
656 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
657 visited_stmts = NULL;
658 else
659 visited_stmts = &parm_ainfo->parm_visited_statements;
660 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
661 visited_stmts);
662 if (parm_ainfo && modified)
663 parm_ainfo->parm_modified = true;
664 return !modified;
665 }
666
667 /* If STMT is an assignment that loads a value from a parameter declaration,
668 return the index of the parameter in ipa_node_params which has not been
669 modified. Otherwise return -1. */
670
671 static int
672 load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
673 struct param_analysis_info *parms_ainfo,
674 gimple stmt)
675 {
676 int index;
677 tree op1;
678
679 if (!gimple_assign_single_p (stmt))
680 return -1;
681
682 op1 = gimple_assign_rhs1 (stmt);
683 if (TREE_CODE (op1) != PARM_DECL)
684 return -1;
685
686 index = ipa_get_param_decl_index_1 (descriptors, op1);
687 if (index < 0
688 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
689 : NULL, stmt, op1))
690 return -1;
691
692 return index;
693 }
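/* For example (sketch), when a formal parameter "a" is not a gimple register,
   a statement such as

     a.0_2 = a;

   is exactly such a load, and the function above returns the index of "a"
   provided the parameter value has not been modified before the statement.  */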
694
695 /* Return true if memory reference REF loads data that are known to be
696 unmodified in this function before reaching statement STMT. PARM_AINFO, if
697 non-NULL, is a pointer to a structure containing temporary information about
698 PARM. */
699
700 static bool
701 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
702 gimple stmt, tree ref)
703 {
704 bool modified = false;
705 ao_ref refd;
706
707 gcc_checking_assert (gimple_vuse (stmt));
708 if (parm_ainfo && parm_ainfo->ref_modified)
709 return false;
710
711 ao_ref_init (&refd, ref);
712 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
713 NULL);
714 if (parm_ainfo && modified)
715 parm_ainfo->ref_modified = true;
716 return !modified;
717 }
718
719 /* Return true if the data pointed to by PARM is known to be unmodified in this
720 function before reaching call statement CALL into which it is passed.
721 PARM_AINFO is a pointer to a structure containing temporary information
722 about PARM. */
723
724 static bool
725 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
726 gimple call, tree parm)
727 {
728 bool modified = false;
729 ao_ref refd;
730
731 /* It's unnecessary to calculate anything about memory contents for a const
732 function because it is not going to use it. But do not cache the result
733 either. Also, no such calculations for non-pointers. */
734 if (!gimple_vuse (call)
735 || !POINTER_TYPE_P (TREE_TYPE (parm)))
736 return false;
737
738 if (parm_ainfo->pt_modified)
739 return false;
740
741 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
742 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
743 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
744 if (modified)
745 parm_ainfo->pt_modified = true;
746 return !modified;
747 }
748
749 /* Return true if we can prove that OP is a memory reference loading unmodified
750 data from an aggregate passed as a parameter and if the aggregate is passed
751 by reference, that the alias type of the load corresponds to the type of the
752 formal parameter (so that we can rely on this type for TBAA in callers).
753 INFO and PARMS_AINFO describe parameters of the current function (but the
754 latter can be NULL), STMT is the load statement. If function returns true,
755 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
756 within the aggregate and whether it is a load from a value passed by
757 reference respectively. */
758
759 static bool
760 ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
761 struct param_analysis_info *parms_ainfo, gimple stmt,
762 tree op, int *index_p, HOST_WIDE_INT *offset_p,
763 bool *by_ref_p)
764 {
765 int index;
766 HOST_WIDE_INT size, max_size;
767 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
768
769 if (max_size == -1 || max_size != size || *offset_p < 0)
770 return false;
771
772 if (DECL_P (base))
773 {
774 int index = ipa_get_param_decl_index_1 (descriptors, base);
775 if (index >= 0
776 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
777 : NULL, stmt, op))
778 {
779 *index_p = index;
780 *by_ref_p = false;
781 return true;
782 }
783 return false;
784 }
785
786 if (TREE_CODE (base) != MEM_REF
787 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
788 || !integer_zerop (TREE_OPERAND (base, 1)))
789 return false;
790
791 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
792 {
793 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
794 index = ipa_get_param_decl_index_1 (descriptors, parm);
795 }
796 else
797 {
798 /* This branch catches situations where a pointer parameter is not a
799 gimple register, for example:
800
801 void hip7(S*) (struct S * p)
802 {
803 void (*<T2e4>) (struct S *) D.1867;
804 struct S * p.1;
805
806 <bb 2>:
807 p.1_1 = p;
808 D.1867_2 = p.1_1->f;
809 D.1867_2 ();
810 gdp = &p;
811 */
812
813 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
814 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
815 }
816
817 if (index >= 0
818 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
819 stmt, op))
820 {
821 *index_p = index;
822 *by_ref_p = true;
823 return true;
824 }
825 return false;
826 }
827
828 /* Just like the previous function, just without the param_analysis_info
829 pointer, for users outside of this file. */
830
831 bool
832 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
833 tree op, int *index_p, HOST_WIDE_INT *offset_p,
834 bool *by_ref_p)
835 {
836 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
837 offset_p, by_ref_p);
838 }
839
840 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
841 of an assignment statement STMT, try to determine whether we are actually
842 handling any of the following cases and construct an appropriate jump
843 function into JFUNC if so:
844
845 1) The passed value is loaded from a formal parameter which is not a gimple
846 register (most probably because it is addressable, the value has to be
847 scalar) and we can guarantee the value has not changed. This case can
848 therefore be described by a simple pass-through jump function. For example:
849
850 foo (int a)
851 {
852 int a.0;
853
854 a.0_2 = a;
855 bar (a.0_2);
856
857 2) The passed value can be described by a simple arithmetic pass-through
858 jump function. E.g.
859
860 foo (int a)
861 {
862 int D.2064;
863
864 D.2064_4 = a.1(D) + 4;
865 bar (D.2064_4);
866
867 This case can also occur in combination of the previous one, e.g.:
868
869 foo (int a, int z)
870 {
871 int a.0;
872 int D.2064;
873
874 a.0_3 = a;
875 D.2064_4 = a.0_3 + 4;
876 foo (D.2064_4);
877
878 3) The passed value is an address of an object within another one (which
879 is also passed by reference). Such situations are described by an ancestor
880 jump function and describe situations such as:
881
882 B::foo() (struct B * const this)
883 {
884 struct A * D.1845;
885
886 D.1845_2 = &this_1(D)->D.1748;
887 A::bar (D.1845_2);
888
889 INFO is the structure describing individual parameters that is accessed by
890 different stages of IPA optimizations. PARMS_AINFO contains the information that is
891 only needed for intraprocedural analysis. */
892
893 static void
894 compute_complex_assign_jump_func (struct ipa_node_params *info,
895 struct param_analysis_info *parms_ainfo,
896 struct ipa_jump_func *jfunc,
897 gimple call, gimple stmt, tree name)
898 {
899 HOST_WIDE_INT offset, size, max_size;
900 tree op1, tc_ssa, base, ssa;
901 int index;
902
903 op1 = gimple_assign_rhs1 (stmt);
904
905 if (TREE_CODE (op1) == SSA_NAME)
906 {
907 if (SSA_NAME_IS_DEFAULT_DEF (op1))
908 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
909 else
910 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
911 SSA_NAME_DEF_STMT (op1));
912 tc_ssa = op1;
913 }
914 else
915 {
916 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
917 tc_ssa = gimple_assign_lhs (stmt);
918 }
919
920 if (index >= 0)
921 {
922 tree op2 = gimple_assign_rhs2 (stmt);
923
924 if (op2)
925 {
926 if (!is_gimple_ip_invariant (op2)
927 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
928 && !useless_type_conversion_p (TREE_TYPE (name),
929 TREE_TYPE (op1))))
930 return;
931
932 ipa_set_jf_arith_pass_through (jfunc, index, op2,
933 gimple_assign_rhs_code (stmt));
934 }
935 else if (gimple_assign_single_p (stmt)
936 && !detect_type_change_ssa (tc_ssa, call, jfunc))
937 {
938 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
939 call, tc_ssa);
940 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
941 }
942 return;
943 }
944
945 if (TREE_CODE (op1) != ADDR_EXPR)
946 return;
947 op1 = TREE_OPERAND (op1, 0);
948 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
949 return;
950 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
951 if (TREE_CODE (base) != MEM_REF
952 /* If this is a varying address, punt. */
953 || max_size == -1
954 || max_size != size)
955 return;
956 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
957 ssa = TREE_OPERAND (base, 0);
958 if (TREE_CODE (ssa) != SSA_NAME
959 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
960 || offset < 0)
961 return;
962
963 /* Dynamic types are changed only in constructors and destructors. */
964 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
965 if (index >= 0
966 && !detect_type_change (op1, base, call, jfunc, offset))
967 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
968 parm_ref_data_pass_through_p (&parms_ainfo[index],
969 call, ssa));
970 }
971
972 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
973 it looks like:
974
975 iftmp.1_3 = &obj_2(D)->D.1762;
976
977 The base of the MEM_REF must be a default definition SSA NAME of a
978 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
979 whole MEM_REF expression is returned and the offset calculated from any
980 handled components and the MEM_REF itself is stored into *OFFSET. The whole
981 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
982
983 static tree
984 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
985 {
986 HOST_WIDE_INT size, max_size;
987 tree expr, parm, obj;
988
989 if (!gimple_assign_single_p (assign))
990 return NULL_TREE;
991 expr = gimple_assign_rhs1 (assign);
992
993 if (TREE_CODE (expr) != ADDR_EXPR)
994 return NULL_TREE;
995 expr = TREE_OPERAND (expr, 0);
996 obj = expr;
997 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
998
999 if (TREE_CODE (expr) != MEM_REF
1000 /* If this is a varying address, punt. */
1001 || max_size == -1
1002 || max_size != size
1003 || *offset < 0)
1004 return NULL_TREE;
1005 parm = TREE_OPERAND (expr, 0);
1006 if (TREE_CODE (parm) != SSA_NAME
1007 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1008 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1009 return NULL_TREE;
1010
1011 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1012 *obj_p = obj;
1013 return expr;
1014 }
1015
1016
1017 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1018 statement PHI, try to find out whether it is in fact a
1019 multiple-inheritance typecast from a descendant into an ancestor of a formal
1020 parameter and thus can be described by an ancestor jump function and if so,
1021 write the appropriate function into JFUNC.
1022
1023 Essentially we want to match the following pattern:
1024
1025 if (obj_2(D) != 0B)
1026 goto <bb 3>;
1027 else
1028 goto <bb 4>;
1029
1030 <bb 3>:
1031 iftmp.1_3 = &obj_2(D)->D.1762;
1032
1033 <bb 4>:
1034 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1035 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1036 return D.1879_6; */
1037
1038 static void
1039 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1040 struct param_analysis_info *parms_ainfo,
1041 struct ipa_jump_func *jfunc,
1042 gimple call, gimple phi)
1043 {
1044 HOST_WIDE_INT offset;
1045 gimple assign, cond;
1046 basic_block phi_bb, assign_bb, cond_bb;
1047 tree tmp, parm, expr, obj;
1048 int index, i;
1049
1050 if (gimple_phi_num_args (phi) != 2)
1051 return;
1052
1053 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1054 tmp = PHI_ARG_DEF (phi, 0);
1055 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1056 tmp = PHI_ARG_DEF (phi, 1);
1057 else
1058 return;
1059 if (TREE_CODE (tmp) != SSA_NAME
1060 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1061 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1062 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1063 return;
1064
1065 assign = SSA_NAME_DEF_STMT (tmp);
1066 assign_bb = gimple_bb (assign);
1067 if (!single_pred_p (assign_bb))
1068 return;
1069 expr = get_ancestor_addr_info (assign, &obj, &offset);
1070 if (!expr)
1071 return;
1072 parm = TREE_OPERAND (expr, 0);
1073 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1074 gcc_assert (index >= 0);
1075
1076 cond_bb = single_pred (assign_bb);
1077 cond = last_stmt (cond_bb);
1078 if (!cond
1079 || gimple_code (cond) != GIMPLE_COND
1080 || gimple_cond_code (cond) != NE_EXPR
1081 || gimple_cond_lhs (cond) != parm
1082 || !integer_zerop (gimple_cond_rhs (cond)))
1083 return;
1084
1085 phi_bb = gimple_bb (phi);
1086 for (i = 0; i < 2; i++)
1087 {
1088 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1089 if (pred != assign_bb && pred != cond_bb)
1090 return;
1091 }
1092
1093 if (!detect_type_change (obj, expr, call, jfunc, offset))
1094 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1095 parm_ref_data_pass_through_p (&parms_ainfo[index],
1096 call, parm));
1097 }
1098
1099 /* Given OP which is passed as an actual argument to a called function,
1100 determine if it is possible to construct a KNOWN_TYPE jump function for it
1101 and if so, create one and store it to JFUNC. */
1102
1103 static void
1104 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1105 gimple call)
1106 {
1107 HOST_WIDE_INT offset, size, max_size;
1108 tree base;
1109
1110 if (!flag_devirtualize
1111 || TREE_CODE (op) != ADDR_EXPR
1112 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1113 return;
1114
1115 op = TREE_OPERAND (op, 0);
1116 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1117 if (!DECL_P (base)
1118 || max_size == -1
1119 || max_size != size
1120 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1121 || is_global_var (base))
1122 return;
1123
1124 if (!TYPE_BINFO (TREE_TYPE (base))
1125 || detect_type_change (op, base, call, jfunc, offset))
1126 return;
1127
1128 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1129 }
1130
1131 /* Inspect the given TYPE and return true iff it has the same structure (the
1132 same number of fields of the same types) as a C++ member pointer. If
1133 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1134 corresponding fields there. */
1135
1136 static bool
1137 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1138 {
1139 tree fld;
1140
1141 if (TREE_CODE (type) != RECORD_TYPE)
1142 return false;
1143
1144 fld = TYPE_FIELDS (type);
1145 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1146 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1147 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1148 return false;
1149
1150 if (method_ptr)
1151 *method_ptr = fld;
1152
1153 fld = DECL_CHAIN (fld);
1154 if (!fld || INTEGRAL_TYPE_P (fld)
1155 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1156 return false;
1157 if (delta)
1158 *delta = fld;
1159
1160 if (DECL_CHAIN (fld))
1161 return false;
1162
1163 return true;
1164 }
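/* For reference, the layout matched above is the usual representation of a
   C++ pointer to member function (e.g. in the Itanium C++ ABI), roughly

     struct
     {
       void (*__pfn) ();     function pointer, or vtable offset + 1
       ptrdiff_t __delta;    adjustment to apply to the this pointer
     };

   only a sketch; the check above relies on the field structure, not on the
   field names.  */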
1165
1166 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1167 return the rhs of its defining statement. Otherwise return RHS as it
1168 is. */
1169
1170 static inline tree
1171 get_ssa_def_if_simple_copy (tree rhs)
1172 {
1173 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1174 {
1175 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1176
1177 if (gimple_assign_single_p (def_stmt))
1178 rhs = gimple_assign_rhs1 (def_stmt);
1179 else
1180 break;
1181 }
1182 return rhs;
1183 }
1184
1185 /* Simple linked list, describing known contents of an aggregate before a
1186 call. */
1187
1188 struct ipa_known_agg_contents_list
1189 {
1190 /* Offset and size of the described part of the aggregate. */
1191 HOST_WIDE_INT offset, size;
1192 /* Known constant value or NULL if the contents are known to be unknown. */
1193 tree constant;
1194 /* Pointer to the next structure in the list. */
1195 struct ipa_known_agg_contents_list *next;
1196 };
1197
1198 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1199 in ARG is filled in with constant values. ARG can either be an aggregate
1200 expression or a pointer to an aggregate. JFUNC is the jump function into
1201 which the constants are subsequently stored. */
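/* For example (a sketch only): given

     s.a = 1;
     s.b = 2.0;
     bar (&s);

   with no intervening writes to "s" before the call, the jump function for
   the argument would end up carrying two aggregate items, each recording the
   offset of the assigned field within "s" and the constant stored there.  */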
1202
1203 static void
1204 determine_known_aggregate_parts (gimple call, tree arg,
1205 struct ipa_jump_func *jfunc)
1206 {
1207 struct ipa_known_agg_contents_list *list = NULL;
1208 int item_count = 0, const_count = 0;
1209 HOST_WIDE_INT arg_offset, arg_size;
1210 gimple_stmt_iterator gsi;
1211 tree arg_base;
1212 bool check_ref, by_ref;
1213 ao_ref r;
1214
1215 /* The function operates in three stages. First, we prepare check_ref, r,
1216 arg_base and arg_offset based on what is actually passed as an actual
1217 argument. */
1218
1219 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1220 {
1221 by_ref = true;
1222 if (TREE_CODE (arg) == SSA_NAME)
1223 {
1224 tree type_size;
1225 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1226 return;
1227 check_ref = true;
1228 arg_base = arg;
1229 arg_offset = 0;
1230 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1231 arg_size = tree_low_cst (type_size, 1);
1232 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1233 }
1234 else if (TREE_CODE (arg) == ADDR_EXPR)
1235 {
1236 HOST_WIDE_INT arg_max_size;
1237
1238 arg = TREE_OPERAND (arg, 0);
1239 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1240 &arg_max_size);
1241 if (arg_max_size == -1
1242 || arg_max_size != arg_size
1243 || arg_offset < 0)
1244 return;
1245 if (DECL_P (arg_base))
1246 {
1247 tree size;
1248 check_ref = false;
1249 size = build_int_cst (integer_type_node, arg_size);
1250 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1251 }
1252 else
1253 return;
1254 }
1255 else
1256 return;
1257 }
1258 else
1259 {
1260 HOST_WIDE_INT arg_max_size;
1261
1262 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1263
1264 by_ref = false;
1265 check_ref = false;
1266 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1267 &arg_max_size);
1268 if (arg_max_size == -1
1269 || arg_max_size != arg_size
1270 || arg_offset < 0)
1271 return;
1272
1273 ao_ref_init (&r, arg);
1274 }
1275
1276 /* Second stage walks back the BB, looks at individual statements and as long
1277 as it is confident of how the statements affect contents of the
1278 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1279 describing it. */
1280 gsi = gsi_for_stmt (call);
1281 gsi_prev (&gsi);
1282 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1283 {
1284 struct ipa_known_agg_contents_list *n, **p;
1285 gimple stmt = gsi_stmt (gsi);
1286 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1287 tree lhs, rhs, lhs_base;
1288 bool partial_overlap;
1289
1290 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1291 continue;
1292 if (!gimple_assign_single_p (stmt))
1293 break;
1294
1295 lhs = gimple_assign_lhs (stmt);
1296 rhs = gimple_assign_rhs1 (stmt);
1297 if (!is_gimple_reg_type (rhs))
1298 break;
1299
1300 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1301 &lhs_max_size);
1302 if (lhs_max_size == -1
1303 || lhs_max_size != lhs_size
1304 || (lhs_offset < arg_offset
1305 && lhs_offset + lhs_size > arg_offset)
1306 || (lhs_offset < arg_offset + arg_size
1307 && lhs_offset + lhs_size > arg_offset + arg_size))
1308 break;
1309
1310 if (check_ref)
1311 {
1312 if (TREE_CODE (lhs_base) != MEM_REF
1313 || TREE_OPERAND (lhs_base, 0) != arg_base
1314 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1315 break;
1316 }
1317 else if (lhs_base != arg_base)
1318 {
1319 if (DECL_P (lhs_base))
1320 continue;
1321 else
1322 break;
1323 }
1324
1325 if (lhs_offset + lhs_size < arg_offset
1326 || lhs_offset >= (arg_offset + arg_size))
1327 continue;
1328
1329 partial_overlap = false;
1330 p = &list;
1331 while (*p && (*p)->offset < lhs_offset)
1332 {
1333 if ((*p)->offset + (*p)->size > lhs_offset)
1334 {
1335 partial_overlap = true;
1336 break;
1337 }
1338 p = &(*p)->next;
1339 }
1340 if (partial_overlap)
1341 break;
1342 if (*p && (*p)->offset < lhs_offset + lhs_size)
1343 {
1344 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1345 /* We already know this value is subsequently overwritten with
1346 something else. */
1347 continue;
1348 else
1349 /* Otherwise this is a partial overlap which we cannot
1350 represent. */
1351 break;
1352 }
1353
1354 rhs = get_ssa_def_if_simple_copy (rhs);
1355 n = XALLOCA (struct ipa_known_agg_contents_list);
1356 n->size = lhs_size;
1357 n->offset = lhs_offset;
1358 if (is_gimple_ip_invariant (rhs))
1359 {
1360 n->constant = rhs;
1361 const_count++;
1362 }
1363 else
1364 n->constant = NULL_TREE;
1365 n->next = *p;
1366 *p = n;
1367
1368 item_count++;
1369 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1370 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1371 break;
1372 }
1373
1374 /* Third stage just goes over the list and creates an appropriate vector of
1375 ipa_agg_jf_item structures out of it, of course only if there are
1376 any known constants to begin with. */
1377
1378 if (const_count)
1379 {
1380 jfunc->agg.by_ref = by_ref;
1381 vec_alloc (jfunc->agg.items, const_count);
1382 while (list)
1383 {
1384 if (list->constant)
1385 {
1386 struct ipa_agg_jf_item item;
1387 item.offset = list->offset - arg_offset;
1388 item.value = prune_expression_for_jf (list->constant);
1389 jfunc->agg.items->quick_push (item);
1390 }
1391 list = list->next;
1392 }
1393 }
1394 }
1395
1396 /* Compute jump function for all arguments of callsite CS and insert the
1397 information in the jump_functions array in the ipa_edge_args corresponding
1398 to this callsite. */
1399
1400 static void
1401 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1402 struct cgraph_edge *cs)
1403 {
1404 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1405 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1406 gimple call = cs->call_stmt;
1407 int n, arg_num = gimple_call_num_args (call);
1408
1409 if (arg_num == 0 || args->jump_functions)
1410 return;
1411 vec_safe_grow_cleared (args->jump_functions, arg_num);
1412
1413 for (n = 0; n < arg_num; n++)
1414 {
1415 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1416 tree arg = gimple_call_arg (call, n);
1417
1418 if (is_gimple_ip_invariant (arg))
1419 ipa_set_jf_constant (jfunc, arg);
1420 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1421 && TREE_CODE (arg) == PARM_DECL)
1422 {
1423 int index = ipa_get_param_decl_index (info, arg);
1424
1425 gcc_assert (index >= 0);
1426 /* Aggregate passed by value, check for pass-through, otherwise we
1427 will attempt to fill in aggregate contents later in this
1428 for cycle. */
1429 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1430 {
1431 ipa_set_jf_simple_pass_through (jfunc, index, false);
1432 continue;
1433 }
1434 }
1435 else if (TREE_CODE (arg) == SSA_NAME)
1436 {
1437 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1438 {
1439 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1440 if (index >= 0
1441 && !detect_type_change_ssa (arg, call, jfunc))
1442 {
1443 bool agg_p;
1444 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1445 call, arg);
1446 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1447 }
1448 }
1449 else
1450 {
1451 gimple stmt = SSA_NAME_DEF_STMT (arg);
1452 if (is_gimple_assign (stmt))
1453 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1454 call, stmt, arg);
1455 else if (gimple_code (stmt) == GIMPLE_PHI)
1456 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1457 call, stmt);
1458 }
1459 }
1460 else
1461 compute_known_type_jump_func (arg, jfunc, call);
1462
1463 if ((jfunc->type != IPA_JF_PASS_THROUGH
1464 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1465 && (jfunc->type != IPA_JF_ANCESTOR
1466 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1467 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1468 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1469 determine_known_aggregate_parts (call, arg, jfunc);
1470 }
1471 }
1472
1473 /* Compute jump functions for all edges - both direct and indirect - outgoing
1474 from NODE. Also count the actual arguments in the process. */
1475
1476 static void
1477 ipa_compute_jump_functions (struct cgraph_node *node,
1478 struct param_analysis_info *parms_ainfo)
1479 {
1480 struct cgraph_edge *cs;
1481
1482 for (cs = node->callees; cs; cs = cs->next_callee)
1483 {
1484 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1485 NULL);
1486 /* We do not need to bother analyzing calls to unknown
1487 functions unless they may become known during lto/whopr. */
1488 if (!callee->analyzed && !flag_lto)
1489 continue;
1490 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1491 }
1492
1493 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1494 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1495 }
1496
1497 /* If STMT looks like a statement loading a value from a member pointer formal
1498 parameter, return that parameter and store the offset of the field to
1499 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1500 might be clobbered). If USE_DELTA, then we look for a use of the delta
1501 field rather than the pfn. */
1502
1503 static tree
1504 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1505 HOST_WIDE_INT *offset_p)
1506 {
1507 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1508
1509 if (!gimple_assign_single_p (stmt))
1510 return NULL_TREE;
1511
1512 rhs = gimple_assign_rhs1 (stmt);
1513 if (TREE_CODE (rhs) == COMPONENT_REF)
1514 {
1515 ref_field = TREE_OPERAND (rhs, 1);
1516 rhs = TREE_OPERAND (rhs, 0);
1517 }
1518 else
1519 ref_field = NULL_TREE;
1520 if (TREE_CODE (rhs) != MEM_REF)
1521 return NULL_TREE;
1522 rec = TREE_OPERAND (rhs, 0);
1523 if (TREE_CODE (rec) != ADDR_EXPR)
1524 return NULL_TREE;
1525 rec = TREE_OPERAND (rec, 0);
1526 if (TREE_CODE (rec) != PARM_DECL
1527 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1528 return NULL_TREE;
1529 ref_offset = TREE_OPERAND (rhs, 1);
1530
1531 if (use_delta)
1532 fld = delta_field;
1533 else
1534 fld = ptr_field;
1535 if (offset_p)
1536 *offset_p = int_bit_position (fld);
1537
1538 if (ref_field)
1539 {
1540 if (integer_nonzerop (ref_offset))
1541 return NULL_TREE;
1542 return ref_field == fld ? rec : NULL_TREE;
1543 }
1544 else
1545 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1546 : NULL_TREE;
1547 }
1548
1549 /* Returns true iff T is an SSA_NAME defined by a statement. */
1550
1551 static bool
1552 ipa_is_ssa_with_stmt_def (tree t)
1553 {
1554 if (TREE_CODE (t) == SSA_NAME
1555 && !SSA_NAME_IS_DEFAULT_DEF (t))
1556 return true;
1557 else
1558 return false;
1559 }
1560
1561 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1562 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1563 indirect call graph edge. */
1564
1565 static struct cgraph_edge *
1566 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1567 {
1568 struct cgraph_edge *cs;
1569
1570 cs = cgraph_edge (node, stmt);
1571 cs->indirect_info->param_index = param_index;
1572 cs->indirect_info->offset = 0;
1573 cs->indirect_info->polymorphic = 0;
1574 cs->indirect_info->agg_contents = 0;
1575 return cs;
1576 }
1577
1578 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1579 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1580 intermediate information about each formal parameter. Currently it checks
1581 whether the call calls a pointer that is a formal parameter and if so, the
1582 parameter is marked with the called flag and an indirect call graph edge
1583 describing the call is created. This is very simple for ordinary pointers
1584 represented in SSA but not-so-nice when it comes to member pointers. The
1585 ugly part of this function does nothing more than trying to match the
1586 pattern of such a call. An example of such a pattern is the gimple dump
1587 below, the call is on the last line:
1588
1589 <bb 2>:
1590 f$__delta_5 = f.__delta;
1591 f$__pfn_24 = f.__pfn;
1592
1593 or
1594 <bb 2>:
1595 f$__delta_5 = MEM[(struct *)&f];
1596 f$__pfn_24 = MEM[(struct *)&f + 4B];
1597
1598 and a few lines below:
1599
1600 <bb 5>
1601 D.2496_3 = (int) f$__pfn_24;
1602 D.2497_4 = D.2496_3 & 1;
1603 if (D.2497_4 != 0)
1604 goto <bb 3>;
1605 else
1606 goto <bb 4>;
1607
1608 <bb 6>:
1609 D.2500_7 = (unsigned int) f$__delta_5;
1610 D.2501_8 = &S + D.2500_7;
1611 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1612 D.2503_10 = *D.2502_9;
1613 D.2504_12 = f$__pfn_24 + -1;
1614 D.2505_13 = (unsigned int) D.2504_12;
1615 D.2506_14 = D.2503_10 + D.2505_13;
1616 D.2507_15 = *D.2506_14;
1617 iftmp.11_16 = (String:: *) D.2507_15;
1618
1619 <bb 7>:
1620 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1621 D.2500_19 = (unsigned int) f$__delta_5;
1622 D.2508_20 = &S + D.2500_19;
1623 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1624
1625 Such patterns are results of simple calls to a member pointer:
1626
1627 int doprinting (int (MyString::* f)(int) const)
1628 {
1629 MyString S ("somestring");
1630
1631 return (S.*f)(4);
1632 }
1633
1634 Moreover, the function also looks for called pointers loaded from aggregates
1635 passed by value or reference. */
1636
1637 static void
1638 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1639 struct ipa_node_params *info,
1640 struct param_analysis_info *parms_ainfo,
1641 gimple call, tree target)
1642 {
1643 gimple def;
1644 tree n1, n2;
1645 gimple d1, d2;
1646 tree rec, rec2, cond;
1647 gimple branch;
1648 int index;
1649 basic_block bb, virt_bb, join;
1650 HOST_WIDE_INT offset;
1651 bool by_ref;
1652
1653 if (SSA_NAME_IS_DEFAULT_DEF (target))
1654 {
1655 tree var = SSA_NAME_VAR (target);
1656 index = ipa_get_param_decl_index (info, var);
1657 if (index >= 0)
1658 ipa_note_param_call (node, index, call);
1659 return;
1660 }
1661
1662 def = SSA_NAME_DEF_STMT (target);
1663 if (gimple_assign_single_p (def)
1664 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1665 gimple_assign_rhs1 (def), &index, &offset,
1666 &by_ref))
1667 {
1668 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1669 cs->indirect_info->offset = offset;
1670 cs->indirect_info->agg_contents = 1;
1671 cs->indirect_info->by_ref = by_ref;
1672 return;
1673 }
1674
1675 /* Now we need to try to match the complex pattern of calling a member
1676 pointer. */
1677 if (gimple_code (def) != GIMPLE_PHI
1678 || gimple_phi_num_args (def) != 2
1679 || !POINTER_TYPE_P (TREE_TYPE (target))
1680 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1681 return;
1682
1683 /* First, we need to check whether one of these is a load from a member
1684 pointer that is a parameter to this function. */
1685 n1 = PHI_ARG_DEF (def, 0);
1686 n2 = PHI_ARG_DEF (def, 1);
1687 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1688 return;
1689 d1 = SSA_NAME_DEF_STMT (n1);
1690 d2 = SSA_NAME_DEF_STMT (n2);
1691
1692 join = gimple_bb (def);
1693 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1694 {
1695 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1696 return;
1697
1698 bb = EDGE_PRED (join, 0)->src;
1699 virt_bb = gimple_bb (d2);
1700 }
1701 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1702 {
1703 bb = EDGE_PRED (join, 1)->src;
1704 virt_bb = gimple_bb (d1);
1705 }
1706 else
1707 return;
1708
1709 /* Second, we need to check that the basic blocks are laid out in the way
1710 corresponding to the pattern. */
1711
1712 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1713 || single_pred (virt_bb) != bb
1714 || single_succ (virt_bb) != join)
1715 return;
1716
1717 /* Third, let's see that the branching is done depending on the least
1718 significant bit of the pfn. */
1719
1720 branch = last_stmt (bb);
1721 if (!branch || gimple_code (branch) != GIMPLE_COND)
1722 return;
1723
1724 if ((gimple_cond_code (branch) != NE_EXPR
1725 && gimple_cond_code (branch) != EQ_EXPR)
1726 || !integer_zerop (gimple_cond_rhs (branch)))
1727 return;
1728
1729 cond = gimple_cond_lhs (branch);
1730 if (!ipa_is_ssa_with_stmt_def (cond))
1731 return;
1732
1733 def = SSA_NAME_DEF_STMT (cond);
1734 if (!is_gimple_assign (def)
1735 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1736 || !integer_onep (gimple_assign_rhs2 (def)))
1737 return;
1738
1739 cond = gimple_assign_rhs1 (def);
1740 if (!ipa_is_ssa_with_stmt_def (cond))
1741 return;
1742
1743 def = SSA_NAME_DEF_STMT (cond);
1744
1745 if (is_gimple_assign (def)
1746 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1747 {
1748 cond = gimple_assign_rhs1 (def);
1749 if (!ipa_is_ssa_with_stmt_def (cond))
1750 return;
1751 def = SSA_NAME_DEF_STMT (cond);
1752 }
1753
1754 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1755 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1756 == ptrmemfunc_vbit_in_delta),
1757 NULL);
1758 if (rec != rec2)
1759 return;
1760
1761 index = ipa_get_param_decl_index (info, rec);
1762 if (index >= 0
1763 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1764 {
1765 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1766 cs->indirect_info->offset = offset;
1767 cs->indirect_info->agg_contents = 1;
1768 }
1769
1770 return;
1771 }
1772
1773 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1774 object referenced in the expression is a formal parameter of the caller
1775 (described by INFO), create a call note for the statement. */
1776
1777 static void
1778 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1779 struct ipa_node_params *info, gimple call,
1780 tree target)
1781 {
1782 struct cgraph_edge *cs;
1783 struct cgraph_indirect_call_info *ii;
1784 struct ipa_jump_func jfunc;
1785 tree obj = OBJ_TYPE_REF_OBJECT (target);
1786 int index;
1787 HOST_WIDE_INT anc_offset;
1788
1789 if (!flag_devirtualize)
1790 return;
1791
1792 if (TREE_CODE (obj) != SSA_NAME)
1793 return;
1794
1795 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1796 {
1797 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1798 return;
1799
1800 anc_offset = 0;
1801 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1802 gcc_assert (index >= 0);
1803 if (detect_type_change_ssa (obj, call, &jfunc))
1804 return;
1805 }
1806 else
1807 {
1808 gimple stmt = SSA_NAME_DEF_STMT (obj);
1809 tree expr;
1810
1811 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1812 if (!expr)
1813 return;
1814 index = ipa_get_param_decl_index (info,
1815 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1816 gcc_assert (index >= 0);
1817 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1818 return;
1819 }
1820
1821 cs = ipa_note_param_call (node, index, call);
1822 ii = cs->indirect_info;
1823 ii->offset = anc_offset;
1824 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1825 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1826 ii->polymorphic = 1;
1827 }
1828
1829 /* Analyze a call statement CALL to determine whether and how it uses formal
1830    parameters of the caller (described by INFO).  PARMS_AINFO is a pointer to
1831    a vector containing intermediate information about each formal parameter. */
1832
1833 static void
1834 ipa_analyze_call_uses (struct cgraph_node *node,
1835 struct ipa_node_params *info,
1836 struct param_analysis_info *parms_ainfo, gimple call)
1837 {
1838 tree target = gimple_call_fn (call);
1839
1840 if (!target)
1841 return;
1842 if (TREE_CODE (target) == SSA_NAME)
1843 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1844 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1845 ipa_analyze_virtual_call_uses (node, info, call, target);
1846 }
1847
1848
1849 /* Analyze the call statement STMT with respect to formal parameters (described
1850    in INFO) of the caller given by NODE. Currently it only checks whether formal
1851 parameters are called. PARMS_AINFO is a pointer to a vector containing
1852 intermediate information about each formal parameter. */
1853
1854 static void
1855 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1856 struct param_analysis_info *parms_ainfo, gimple stmt)
1857 {
1858 if (is_gimple_call (stmt))
1859 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1860 }
1861
1862 /* Callback of walk_stmt_load_store_addr_ops for loads, stores and addresses.
1863 If OP is a parameter declaration, mark it as used in the info structure
1864 passed in DATA. */
1865
1866 static bool
1867 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1868 tree op, void *data)
1869 {
1870 struct ipa_node_params *info = (struct ipa_node_params *) data;
1871
1872 op = get_base_address (op);
1873 if (op
1874 && TREE_CODE (op) == PARM_DECL)
1875 {
1876 int index = ipa_get_param_decl_index (info, op);
1877 gcc_assert (index >= 0);
1878 ipa_set_param_used (info, index, true);
1879 }
1880
1881 return false;
1882 }
1883
1884 /* Scan the function body of NODE and inspect the uses of formal parameters.
1885 Store the findings in various structures of the associated ipa_node_params
1886 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1887 vector containing intermediate information about each formal parameter. */
1888
1889 static void
1890 ipa_analyze_params_uses (struct cgraph_node *node,
1891 struct param_analysis_info *parms_ainfo)
1892 {
1893 tree decl = node->symbol.decl;
1894 basic_block bb;
1895 struct function *func;
1896 gimple_stmt_iterator gsi;
1897 struct ipa_node_params *info = IPA_NODE_REF (node);
1898 int i;
1899
1900 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1901 return;
1902
1903 for (i = 0; i < ipa_get_param_count (info); i++)
1904 {
1905 tree parm = ipa_get_param (info, i);
1906 tree ddef;
1907 	 /* For SSA regs see if the parameter is used.  For non-SSA we compute
1908 the flag during modification analysis. */
1909 if (is_gimple_reg (parm)
1910 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1911 parm)) != NULL_TREE
1912 && !has_zero_uses (ddef))
1913 ipa_set_param_used (info, i, true);
1914 }
1915
1916 func = DECL_STRUCT_FUNCTION (decl);
1917 FOR_EACH_BB_FN (bb, func)
1918 {
1919 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1920 {
1921 gimple stmt = gsi_stmt (gsi);
1922
1923 if (is_gimple_debug (stmt))
1924 continue;
1925
1926 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1927 walk_stmt_load_store_addr_ops (stmt, info,
1928 visit_ref_for_mod_analysis,
1929 visit_ref_for_mod_analysis,
1930 visit_ref_for_mod_analysis);
1931 }
1932 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1933 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1934 visit_ref_for_mod_analysis,
1935 visit_ref_for_mod_analysis,
1936 visit_ref_for_mod_analysis);
1937 }
1938
1939 info->uses_analysis_done = 1;
1940 }
1941
1942 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
1943
1944 static void
1945 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
1946 {
1947 int i;
1948
1949 for (i = 0; i < param_count; i++)
1950 {
1951 if (parms_ainfo[i].parm_visited_statements)
1952 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1953 if (parms_ainfo[i].pt_visited_statements)
1954 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1955 }
1956 }
1957
1958 /* Initialize the array describing properties of formal parameters
1959 of NODE, analyze their uses and compute jump functions associated
1960 with actual arguments of calls from within NODE. */
1961
1962 void
1963 ipa_analyze_node (struct cgraph_node *node)
1964 {
1965 struct ipa_node_params *info;
1966 struct param_analysis_info *parms_ainfo;
1967 int param_count;
1968
1969 ipa_check_create_node_params ();
1970 ipa_check_create_edge_args ();
1971 info = IPA_NODE_REF (node);
1972 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1973 ipa_initialize_node_params (node);
1974
1975 param_count = ipa_get_param_count (info);
1976 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1977 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1978
1979 ipa_analyze_params_uses (node, parms_ainfo);
1980 ipa_compute_jump_functions (node, parms_ainfo);
1981
1982 free_parms_ainfo (parms_ainfo, param_count);
1983 pop_cfun ();
1984 }
1985
1986
1987 /* Update the jump function DST when the call graph edge corresponding to SRC
1988    is being inlined, knowing that DST is of type ancestor and SRC of known
1989    type. */
1990
1991 static void
1992 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1993 struct ipa_jump_func *dst)
1994 {
1995 HOST_WIDE_INT combined_offset;
1996 tree combined_type;
1997
1998 combined_offset = ipa_get_jf_known_type_offset (src)
1999 + ipa_get_jf_ancestor_offset (dst);
2000 combined_type = ipa_get_jf_ancestor_type (dst);
2001
2002 ipa_set_jf_known_type (dst, combined_offset,
2003 ipa_get_jf_known_type_base_type (src),
2004 combined_type);
2005 }
2006
2007 /* Update the jump functions associated with call graph edge E when the call
2008 graph edge CS is being inlined, assuming that E->caller is already (possibly
2009 indirectly) inlined into CS->callee and that E has not been inlined. */
2010
2011 static void
2012 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2013 struct cgraph_edge *e)
2014 {
2015 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2016 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2017 int count = ipa_get_cs_argument_count (args);
2018 int i;
2019
2020 for (i = 0; i < count; i++)
2021 {
2022 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2023
2024 if (dst->type == IPA_JF_ANCESTOR)
2025 {
2026 struct ipa_jump_func *src;
2027 int dst_fid = dst->value.ancestor.formal_id;
2028
2029 	  /* A variable number of arguments can cause havoc if we try to access
2030 one that does not exist in the inlined edge. So make sure we
2031 don't. */
2032 if (dst_fid >= ipa_get_cs_argument_count (top))
2033 {
2034 dst->type = IPA_JF_UNKNOWN;
2035 continue;
2036 }
2037
2038 src = ipa_get_ith_jump_func (top, dst_fid);
2039
2040 if (src->agg.items
2041 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2042 {
2043 struct ipa_agg_jf_item *item;
2044 int j;
2045
2046 /* Currently we do not produce clobber aggregate jump functions,
2047 replace with merging when we do. */
2048 gcc_assert (!dst->agg.items);
2049
2050 dst->agg.items = vec_safe_copy (src->agg.items);
2051 dst->agg.by_ref = src->agg.by_ref;
2052 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2053 item->offset -= dst->value.ancestor.offset;
2054 }
2055
2056 if (src->type == IPA_JF_KNOWN_TYPE)
2057 combine_known_type_and_ancestor_jfs (src, dst);
2058 else if (src->type == IPA_JF_PASS_THROUGH
2059 && src->value.pass_through.operation == NOP_EXPR)
2060 {
2061 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2062 dst->value.ancestor.agg_preserved &=
2063 src->value.pass_through.agg_preserved;
2064 }
2065 else if (src->type == IPA_JF_ANCESTOR)
2066 {
2067 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2068 dst->value.ancestor.offset += src->value.ancestor.offset;
2069 dst->value.ancestor.agg_preserved &=
2070 src->value.ancestor.agg_preserved;
2071 }
2072 else
2073 dst->type = IPA_JF_UNKNOWN;
2074 }
2075 else if (dst->type == IPA_JF_PASS_THROUGH)
2076 {
2077 struct ipa_jump_func *src;
2078 	  /* We must check range due to calls with a variable number of arguments
2079 and we cannot combine jump functions with operations. */
2080 if (dst->value.pass_through.operation == NOP_EXPR
2081 && (dst->value.pass_through.formal_id
2082 < ipa_get_cs_argument_count (top)))
2083 {
2084 bool agg_p;
2085 int dst_fid = dst->value.pass_through.formal_id;
2086 src = ipa_get_ith_jump_func (top, dst_fid);
2087 agg_p = dst->value.pass_through.agg_preserved;
2088
2089 dst->type = src->type;
2090 dst->value = src->value;
2091
2092 if (src->agg.items
2093 && (agg_p || !src->agg.by_ref))
2094 {
2095 /* Currently we do not produce clobber aggregate jump
2096 functions, replace with merging when we do. */
2097 gcc_assert (!dst->agg.items);
2098
2099 dst->agg.by_ref = src->agg.by_ref;
2100 dst->agg.items = vec_safe_copy (src->agg.items);
2101 }
2102
2103 if (!agg_p)
2104 {
2105 if (dst->type == IPA_JF_PASS_THROUGH)
2106 dst->value.pass_through.agg_preserved = false;
2107 else if (dst->type == IPA_JF_ANCESTOR)
2108 dst->value.ancestor.agg_preserved = false;
2109 }
2110 }
2111 else
2112 dst->type = IPA_JF_UNKNOWN;
2113 }
2114 }
2115 }
2116
2117 /* If TARGET is an addr_expr of a function declaration, make it the destination
2118 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2119
2120 struct cgraph_edge *
2121 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2122 {
2123 struct cgraph_node *callee;
2124 struct inline_edge_summary *es = inline_edge_summary (ie);
2125
2126 if (TREE_CODE (target) == ADDR_EXPR)
2127 target = TREE_OPERAND (target, 0);
2128 if (TREE_CODE (target) != FUNCTION_DECL)
2129 return NULL;
2130 callee = cgraph_get_node (target);
2131 if (!callee)
2132 return NULL;
2133 ipa_check_create_node_params ();
2134
2135   /* We cannot make edges to inline clones.  It is a bug that someone removed
2136 the cgraph node too early. */
2137 gcc_assert (!callee->global.inlined_to);
2138
2139 cgraph_make_edge_direct (ie, callee);
2140 es = inline_edge_summary (ie);
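  /* The call is direct now, so charge the edge with the cheaper direct-call
     cost in the inline summary instead of the indirect-call cost.  */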
2141 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2142 - eni_size_weights.call_cost);
2143 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2144 - eni_time_weights.call_cost);
2145 if (dump_file)
2146 {
2147 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2148 "(%s/%i -> %s/%i), for stmt ",
2149 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2150 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2151 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2152 if (ie->call_stmt)
2153 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2154 else
2155 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2156 }
2157 callee = cgraph_function_or_thunk_node (callee, NULL);
2158
2159 return ie;
2160 }
2161
2162 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2163    return NULL if there is none. BY_REF specifies whether the value has to
2164 be passed by reference or by value. */
2165
2166 tree
2167 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2168 HOST_WIDE_INT offset, bool by_ref)
2169 {
2170 struct ipa_agg_jf_item *item;
2171 int i;
2172
2173 if (by_ref != agg->by_ref)
2174 return NULL;
2175
2176 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2177 if (item->offset == offset)
2178 {
2179 /* Currently we do not have clobber values, return NULL for them once
2180 we do. */
2181 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2182 return item->value;
2183 }
2184 return NULL;
2185 }
2186
2187 /* Try to find a destination for indirect edge IE that corresponds to a simple
2188 call or a call of a member function pointer and where the destination is a
2189 pointer formal parameter described by jump function JFUNC. If it can be
2190 determined, return the newly direct edge, otherwise return NULL. */
2191
2192 static struct cgraph_edge *
2193 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2194 struct ipa_jump_func *jfunc)
2195 {
2196 tree target;
2197
2198 if (ie->indirect_info->agg_contents)
2199 {
2200 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2201 ie->indirect_info->offset,
2202 ie->indirect_info->by_ref);
2203 if (!target)
2204 return NULL;
2205 }
2206 else
2207 {
2208 if (jfunc->type != IPA_JF_CONST)
2209 return NULL;
2210 target = ipa_get_jf_constant (jfunc);
2211 }
2212 return ipa_make_edge_direct_to_target (ie, target);
2213 }
2214
2215 /* Try to find a destination for indirect edge IE that corresponds to a
2216 virtual call based on a formal parameter which is described by jump
2217 function JFUNC and if it can be determined, make it direct and return the
2218 direct edge. Otherwise, return NULL. */
2219
2220 static struct cgraph_edge *
2221 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2222 struct ipa_jump_func *jfunc)
2223 {
2224 tree binfo, target;
2225
2226 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2227 return NULL;
2228
2229 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2230 gcc_checking_assert (binfo);
2231 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2232 + ie->indirect_info->offset,
2233 ie->indirect_info->otr_type);
2234 if (binfo)
2235 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2236 binfo);
2237 else
2238 return NULL;
2239
2240 if (target)
2241 return ipa_make_edge_direct_to_target (ie, target);
2242 else
2243 return NULL;
2244 }
2245
2246 /* Update the param called notes associated with NODE when CS is being inlined,
2247 assuming NODE is (potentially indirectly) inlined into CS->callee.
2248 Moreover, if the callee is discovered to be constant, create a new cgraph
2249 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2250 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2251
2252 static bool
2253 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2254 struct cgraph_node *node,
2255 vec<cgraph_edge_p> *new_edges)
2256 {
2257 struct ipa_edge_args *top;
2258 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2259 bool res = false;
2260
2261 ipa_check_create_edge_args ();
2262 top = IPA_EDGE_REF (cs);
2263
2264 for (ie = node->indirect_calls; ie; ie = next_ie)
2265 {
2266 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2267 struct ipa_jump_func *jfunc;
2268 int param_index;
2269
2270 next_ie = ie->next_callee;
2271
2272 if (ici->param_index == -1)
2273 continue;
2274
2275       /* We must check range due to calls with a variable number of arguments: */
2276 if (ici->param_index >= ipa_get_cs_argument_count (top))
2277 {
2278 ici->param_index = -1;
2279 continue;
2280 }
2281
2282 param_index = ici->param_index;
2283 jfunc = ipa_get_ith_jump_func (top, param_index);
2284 if (jfunc->type == IPA_JF_PASS_THROUGH
2285 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2286 {
2287 if (ici->agg_contents
2288 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2289 ici->param_index = -1;
2290 else
2291 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2292 }
2293 else if (jfunc->type == IPA_JF_ANCESTOR)
2294 {
2295 if (ici->agg_contents
2296 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2297 ici->param_index = -1;
2298 else
2299 {
2300 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2301 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2302 }
2303 }
2304 else
2305 /* Either we can find a destination for this edge now or never. */
2306 ici->param_index = -1;
2307
2308 if (!flag_indirect_inlining)
2309 continue;
2310
2311 if (ici->polymorphic)
2312 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2313 else
2314 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2315
2316 if (new_direct_edge)
2317 {
2318 new_direct_edge->indirect_inlining_edge = 1;
2319 if (new_direct_edge->call_stmt)
2320 new_direct_edge->call_stmt_cannot_inline_p
2321 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2322 new_direct_edge->callee->symbol.decl);
2323 if (new_edges)
2324 {
2325 new_edges->safe_push (new_direct_edge);
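	      /* Re-fetch TOP in case the vector of edge arguments has been
		 reallocated while the new direct edge was being created.  */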
2326 top = IPA_EDGE_REF (cs);
2327 res = true;
2328 }
2329 }
2330 }
2331
2332 return res;
2333 }
2334
2335 /* Recursively traverse subtree of NODE (including node) made of inlined
2336 cgraph_edges when CS has been inlined and invoke
2337 update_indirect_edges_after_inlining on all nodes and
2338 update_jump_functions_after_inlining on all non-inlined edges that lead out
2339 of this subtree. Newly discovered indirect edges will be added to
2340 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2341 created. */
2342
2343 static bool
2344 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2345 struct cgraph_node *node,
2346 vec<cgraph_edge_p> *new_edges)
2347 {
2348 struct cgraph_edge *e;
2349 bool res;
2350
2351 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2352
2353 for (e = node->callees; e; e = e->next_callee)
2354 if (!e->inline_failed)
2355 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2356 else
2357 update_jump_functions_after_inlining (cs, e);
2358 for (e = node->indirect_calls; e; e = e->next_callee)
2359 update_jump_functions_after_inlining (cs, e);
2360
2361 return res;
2362 }
2363
2364 /* Update jump functions and call note functions on inlining the call site CS.
2365 CS is expected to lead to a node already cloned by
2366 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2367    *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2368 created. */
2369
2370 bool
2371 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2372 vec<cgraph_edge_p> *new_edges)
2373 {
2374 bool changed;
2375 /* Do nothing if the preparation phase has not been carried out yet
2376 (i.e. during early inlining). */
2377 if (!ipa_node_params_vector.exists ())
2378 return false;
2379 gcc_assert (ipa_edge_args_vector);
2380
2381 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2382
2383 /* We do not keep jump functions of inlined edges up to date. Better to free
2384 them so we do not access them accidentally. */
2385 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2386 return changed;
2387 }
2388
2389 /* Frees all dynamically allocated structures that the argument info points
2390 to. */
2391
2392 void
2393 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2394 {
2395 vec_free (args->jump_functions);
2396 memset (args, 0, sizeof (*args));
2397 }
2398
2399 /* Free all ipa_edge structures. */
2400
2401 void
2402 ipa_free_all_edge_args (void)
2403 {
2404 int i;
2405 struct ipa_edge_args *args;
2406
2407 if (!ipa_edge_args_vector)
2408 return;
2409
2410 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
2411 ipa_free_edge_args_substructures (args);
2412
2413 vec_free (ipa_edge_args_vector);
2414 }
2415
2416 /* Frees all dynamically allocated structures that the param info points
2417 to. */
2418
2419 void
2420 ipa_free_node_params_substructures (struct ipa_node_params *info)
2421 {
2422 info->descriptors.release ();
2423 free (info->lattices);
2424   /* Lattice values and their sources are deallocated with their allocation
2425 pool. */
2426 info->known_vals.release ();
2427 memset (info, 0, sizeof (*info));
2428 }
2429
2430 /* Free all ipa_node_params structures. */
2431
2432 void
2433 ipa_free_all_node_params (void)
2434 {
2435 int i;
2436 struct ipa_node_params *info;
2437
2438 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
2439 ipa_free_node_params_substructures (info);
2440
2441 ipa_node_params_vector.release ();
2442 }
2443
2444 /* Set the aggregate replacements of NODE to be AGGVALS. */
2445
2446 void
2447 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2448 struct ipa_agg_replacement_value *aggvals)
2449 {
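  /* Grow the vector, if necessary, so that it can be indexed by any node uid.  */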
2450 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2451 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2452
2453 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2454 }
2455
2456 /* Hook that is called by cgraph.c when an edge is removed. */
2457
2458 static void
2459 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2460 {
2461   /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2462 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
2463 return;
2464 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2465 }
2466
2467 /* Hook that is called by cgraph.c when a node is removed. */
2468
2469 static void
2470 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2471 {
2472   /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2473 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2474 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2475 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2476 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
2477 }
2478
2479 /* Hook that is called by cgraph.c when an edge is duplicated. */
2480
2481 static void
2482 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2483 			   ATTRIBUTE_UNUSED void *data)
2484 {
2485 struct ipa_edge_args *old_args, *new_args;
2486 unsigned int i;
2487
2488 ipa_check_create_edge_args ();
2489
2490 old_args = IPA_EDGE_REF (src);
2491 new_args = IPA_EDGE_REF (dst);
2492
2493 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
2494
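  /* The vec_safe_copy above only made shallow copies of the jump functions,
     so duplicate the vectors of aggregate items they point to as well.  */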
2495 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2496 (*new_args->jump_functions)[i].agg.items
2497 = vec_safe_copy ((*old_args->jump_functions)[i].agg.items);
2498 }
2499
2500 /* Hook that is called by cgraph.c when a node is duplicated. */
2501
2502 static void
2503 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2504 ATTRIBUTE_UNUSED void *data)
2505 {
2506 struct ipa_node_params *old_info, *new_info;
2507 struct ipa_agg_replacement_value *old_av, *new_av;
2508
2509 ipa_check_create_node_params ();
2510 old_info = IPA_NODE_REF (src);
2511 new_info = IPA_NODE_REF (dst);
2512
2513 new_info->descriptors = old_info->descriptors.copy ();
2514 new_info->lattices = NULL;
2515 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2516
2517 new_info->uses_analysis_done = old_info->uses_analysis_done;
2518 new_info->node_enqueued = old_info->node_enqueued;
2519
2520 old_av = ipa_get_agg_replacements_for_node (src);
2521 if (!old_av)
2522 return;
2523
2524 new_av = NULL;
2525 while (old_av)
2526 {
2527 struct ipa_agg_replacement_value *v;
2528
2529 v = ggc_alloc_ipa_agg_replacement_value ();
2530 memcpy (v, old_av, sizeof (*v));
2531 v->next = new_av;
2532 new_av = v;
2533 old_av = old_av->next;
2534 }
2535 ipa_set_node_agg_value_chain (dst, new_av);
2536 }
2537
2538
2539 /* Analyze newly added function into callgraph. */
2540
2541 static void
2542 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2543 {
2544 ipa_analyze_node (node);
2545 }
2546
2547 /* Register our cgraph hooks if they are not already there. */
2548
2549 void
2550 ipa_register_cgraph_hooks (void)
2551 {
2552 if (!edge_removal_hook_holder)
2553 edge_removal_hook_holder =
2554 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2555 if (!node_removal_hook_holder)
2556 node_removal_hook_holder =
2557 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2558 if (!edge_duplication_hook_holder)
2559 edge_duplication_hook_holder =
2560 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2561 if (!node_duplication_hook_holder)
2562 node_duplication_hook_holder =
2563 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2564 function_insertion_hook_holder =
2565 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2566 }
2567
2568 /* Unregister our cgraph hooks. */
2569
2570 static void
2571 ipa_unregister_cgraph_hooks (void)
2572 {
2573 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2574 edge_removal_hook_holder = NULL;
2575 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2576 node_removal_hook_holder = NULL;
2577 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2578 edge_duplication_hook_holder = NULL;
2579 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2580 node_duplication_hook_holder = NULL;
2581 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2582 function_insertion_hook_holder = NULL;
2583 }
2584
2585 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2586 longer needed after ipa-cp. */
2587
2588 void
2589 ipa_free_all_structures_after_ipa_cp (void)
2590 {
2591 if (!optimize)
2592 {
2593 ipa_free_all_edge_args ();
2594 ipa_free_all_node_params ();
2595 free_alloc_pool (ipcp_sources_pool);
2596 free_alloc_pool (ipcp_values_pool);
2597 free_alloc_pool (ipcp_agg_lattice_pool);
2598 ipa_unregister_cgraph_hooks ();
2599 }
2600 }
2601
2602 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2603 longer needed after indirect inlining. */
2604
2605 void
2606 ipa_free_all_structures_after_iinln (void)
2607 {
2608 ipa_free_all_edge_args ();
2609 ipa_free_all_node_params ();
2610 ipa_unregister_cgraph_hooks ();
2611 if (ipcp_sources_pool)
2612 free_alloc_pool (ipcp_sources_pool);
2613 if (ipcp_values_pool)
2614 free_alloc_pool (ipcp_values_pool);
2615 if (ipcp_agg_lattice_pool)
2616 free_alloc_pool (ipcp_agg_lattice_pool);
2617 }
2618
2619 /* Print the parameter descriptors (ipa_tree_map data structures) of
2620    function NODE to F. */
2621
2622 void
2623 ipa_print_node_params (FILE *f, struct cgraph_node *node)
2624 {
2625 int i, count;
2626 tree temp;
2627 struct ipa_node_params *info;
2628
2629 if (!node->analyzed)
2630 return;
2631 info = IPA_NODE_REF (node);
2632 fprintf (f, " function %s parameter descriptors:\n",
2633 cgraph_node_name (node));
2634 count = ipa_get_param_count (info);
2635 for (i = 0; i < count; i++)
2636 {
2637 temp = ipa_get_param (info, i);
2638 if (TREE_CODE (temp) == PARM_DECL)
2639 fprintf (f, " param %d : %s", i,
2640 (DECL_NAME (temp)
2641 ? (*lang_hooks.decl_printable_name) (temp, 2)
2642 : "(unnamed)"));
2643 if (ipa_is_param_used (info, i))
2644 fprintf (f, " used");
2645 fprintf (f, "\n");
2646 }
2647 }
2648
2649 /* Print ipa_tree_map data structures of all functions in the
2650 callgraph to F. */
2651
2652 void
2653 ipa_print_all_params (FILE * f)
2654 {
2655 struct cgraph_node *node;
2656
2657 fprintf (f, "\nFunction parameters:\n");
2658 FOR_EACH_FUNCTION (node)
2659 ipa_print_node_params (f, node);
2660 }
2661
2662 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
2663
2664 vec<tree>
2665 ipa_get_vector_of_formal_parms (tree fndecl)
2666 {
2667 vec<tree> args;
2668 int count;
2669 tree parm;
2670
2671 count = count_formal_params (fndecl);
2672 args.create (count);
2673 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2674 args.quick_push (parm);
2675
2676 return args;
2677 }
2678
2679 /* Return a heap-allocated vector containing types of formal parameters of
2680 function type FNTYPE. */
2681
2682 static inline vec<tree>
2683 get_vector_of_formal_parm_types (tree fntype)
2684 {
2685 vec<tree> types;
2686 int count = 0;
2687 tree t;
2688
2689 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2690 count++;
2691
2692 types.create (count);
2693 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2694 types.quick_push (TREE_VALUE (t));
2695
2696 return types;
2697 }
2698
2699 /* Modify the function declaration FNDECL and its type according to the plan in
2700 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2701 to reflect the actual parameters being modified which are determined by the
2702 base_index field. */
2703
2704 void
2705 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2706 const char *synth_parm_prefix)
2707 {
2708 vec<tree> oparms, otypes;
2709 tree orig_type, new_type = NULL;
2710 tree old_arg_types, t, new_arg_types = NULL;
2711 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2712 int i, len = adjustments.length ();
2713 tree new_reversed = NULL;
2714 bool care_for_types, last_parm_void;
2715
2716 if (!synth_parm_prefix)
2717 synth_parm_prefix = "SYNTH";
2718
2719 oparms = ipa_get_vector_of_formal_parms (fndecl);
2720 orig_type = TREE_TYPE (fndecl);
2721 old_arg_types = TYPE_ARG_TYPES (orig_type);
2722
2723   /* The following test is an ugly hack; some functions simply don't have any
2724 arguments in their type. This is probably a bug but well... */
2725 care_for_types = (old_arg_types != NULL_TREE);
2726 if (care_for_types)
2727 {
2728 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2729 == void_type_node);
2730 otypes = get_vector_of_formal_parm_types (orig_type);
2731 if (last_parm_void)
2732 gcc_assert (oparms.length () + 1 == otypes.length ());
2733 else
2734 gcc_assert (oparms.length () == otypes.length ());
2735 }
2736 else
2737 {
2738 last_parm_void = false;
2739 otypes.create (0);
2740 }
2741
2742 for (i = 0; i < len; i++)
2743 {
2744 struct ipa_parm_adjustment *adj;
2745 gcc_assert (link);
2746
2747 adj = &adjustments[i];
2748 parm = oparms[adj->base_index];
2749 adj->base = parm;
2750
2751 if (adj->copy_param)
2752 {
2753 if (care_for_types)
2754 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
2755 new_arg_types);
2756 *link = parm;
2757 link = &DECL_CHAIN (parm);
2758 }
2759 else if (!adj->remove_param)
2760 {
2761 tree new_parm;
2762 tree ptype;
2763
2764 if (adj->by_ref)
2765 ptype = build_pointer_type (adj->type);
2766 else
2767 ptype = adj->type;
2768
2769 if (care_for_types)
2770 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2771
2772 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2773 ptype);
2774 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2775
2776 DECL_ARTIFICIAL (new_parm) = 1;
2777 DECL_ARG_TYPE (new_parm) = ptype;
2778 DECL_CONTEXT (new_parm) = fndecl;
2779 TREE_USED (new_parm) = 1;
2780 DECL_IGNORED_P (new_parm) = 1;
2781 layout_decl (new_parm, 0);
2782
2783 adj->base = parm;
2784 adj->reduction = new_parm;
2785
2786 *link = new_parm;
2787
2788 link = &DECL_CHAIN (new_parm);
2789 }
2790 }
2791
2792 *link = NULL_TREE;
2793
2794 if (care_for_types)
2795 {
2796 new_reversed = nreverse (new_arg_types);
2797 if (last_parm_void)
2798 {
2799 if (new_reversed)
2800 TREE_CHAIN (new_arg_types) = void_list_node;
2801 else
2802 new_reversed = void_list_node;
2803 }
2804 }
2805
2806   /* Use copy_node to preserve as much as possible from the original type
2807      (debug info, attribute lists etc.).
2808      The exception is that METHOD_TYPEs must have a THIS argument.
2809      When we are asked to remove it, we need to build a new FUNCTION_TYPE
2810      instead. */
2811 if (TREE_CODE (orig_type) != METHOD_TYPE
2812 || (adjustments[0].copy_param
2813 && adjustments[0].base_index == 0))
2814 {
2815 new_type = build_distinct_type_copy (orig_type);
2816 TYPE_ARG_TYPES (new_type) = new_reversed;
2817 }
2818 else
2819 {
2820 new_type
2821 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2822 new_reversed));
2823 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2824 DECL_VINDEX (fndecl) = NULL_TREE;
2825 }
2826
2827 /* When signature changes, we need to clear builtin info. */
2828 if (DECL_BUILT_IN (fndecl))
2829 {
2830 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2831 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2832 }
2833
2834 /* This is a new type, not a copy of an old type. Need to reassociate
2835 variants. We can handle everything except the main variant lazily. */
2836 t = TYPE_MAIN_VARIANT (orig_type);
2837 if (orig_type != t)
2838 {
2839 TYPE_MAIN_VARIANT (new_type) = t;
2840 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2841 TYPE_NEXT_VARIANT (t) = new_type;
2842 }
2843 else
2844 {
2845 TYPE_MAIN_VARIANT (new_type) = new_type;
2846 TYPE_NEXT_VARIANT (new_type) = NULL;
2847 }
2848
2849 TREE_TYPE (fndecl) = new_type;
2850 DECL_VIRTUAL_P (fndecl) = 0;
2851 otypes.release ();
2852 oparms.release ();
2853 }
2854
2855 /* Modify actual arguments of the function call in statement STMT as indicated
2856    in ADJUSTMENTS.  If this is a directly recursive call, CS must be NULL.
2857    Otherwise it must contain the corresponding call graph edge. */
2858
2859 void
2860 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2861 ipa_parm_adjustment_vec adjustments)
2862 {
2863 vec<tree> vargs;
2864 vec<tree, va_gc> **debug_args = NULL;
2865 gimple new_stmt;
2866 gimple_stmt_iterator gsi;
2867 tree callee_decl;
2868 int i, len;
2869
2870 len = adjustments.length ();
2871 vargs.create (len);
2872 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2873
2874 gsi = gsi_for_stmt (stmt);
2875 for (i = 0; i < len; i++)
2876 {
2877 struct ipa_parm_adjustment *adj;
2878
2879 adj = &adjustments[i];
2880
2881 if (adj->copy_param)
2882 {
2883 tree arg = gimple_call_arg (stmt, adj->base_index);
2884
2885 vargs.quick_push (arg);
2886 }
2887 else if (!adj->remove_param)
2888 {
2889 tree expr, base, off;
2890 location_t loc;
2891
2892 	  /* We create a new parameter out of the value of the old one; we can
2893 	     do the following kinds of transformations:
2894
2895 - A scalar passed by reference is converted to a scalar passed by
2896 value. (adj->by_ref is false and the type of the original
2897 actual argument is a pointer to a scalar).
2898
2899 - A part of an aggregate is passed instead of the whole aggregate.
2900 The part can be passed either by value or by reference, this is
2901 determined by value of adj->by_ref. Moreover, the code below
2902 handles both situations when the original aggregate is passed by
2903 value (its type is not a pointer) and when it is passed by
2904 reference (it is a pointer to an aggregate).
2905
2906 When the new argument is passed by reference (adj->by_ref is true)
2907 it must be a part of an aggregate and therefore we form it by
2908 simply taking the address of a reference inside the original
2909 aggregate. */
2910
2911 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2912 base = gimple_call_arg (stmt, adj->base_index);
2913 loc = EXPR_LOCATION (base);
2914
2915 if (TREE_CODE (base) != ADDR_EXPR
2916 && POINTER_TYPE_P (TREE_TYPE (base)))
2917 off = build_int_cst (adj->alias_ptr_type,
2918 adj->offset / BITS_PER_UNIT);
2919 else
2920 {
2921 HOST_WIDE_INT base_offset;
2922 tree prev_base;
2923
2924 if (TREE_CODE (base) == ADDR_EXPR)
2925 base = TREE_OPERAND (base, 0);
2926 prev_base = base;
2927 base = get_addr_base_and_unit_offset (base, &base_offset);
2928 /* Aggregate arguments can have non-invariant addresses. */
2929 if (!base)
2930 {
2931 base = build_fold_addr_expr (prev_base);
2932 off = build_int_cst (adj->alias_ptr_type,
2933 adj->offset / BITS_PER_UNIT);
2934 }
2935 else if (TREE_CODE (base) == MEM_REF)
2936 {
2937 off = build_int_cst (adj->alias_ptr_type,
2938 base_offset
2939 + adj->offset / BITS_PER_UNIT);
2940 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2941 off);
2942 base = TREE_OPERAND (base, 0);
2943 }
2944 else
2945 {
2946 off = build_int_cst (adj->alias_ptr_type,
2947 base_offset
2948 + adj->offset / BITS_PER_UNIT);
2949 base = build_fold_addr_expr (base);
2950 }
2951 }
2952
2953 if (!adj->by_ref)
2954 {
2955 tree type = adj->type;
2956 unsigned int align;
2957 unsigned HOST_WIDE_INT misalign;
2958
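	      /* Combine the known alignment of BASE with the constant offset
		 OFF and, if the result is less aligned than ADJ->type, use an
		 appropriately aligned variant of the type for the MEM_REF.  */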
2959 get_pointer_alignment_1 (base, &align, &misalign);
2960 misalign += (tree_to_double_int (off)
2961 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2962 * BITS_PER_UNIT);
2963 misalign = misalign & (align - 1);
2964 if (misalign != 0)
2965 align = (misalign & -misalign);
2966 if (align < TYPE_ALIGN (type))
2967 type = build_aligned_type (type, align);
2968 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2969 }
2970 else
2971 {
2972 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2973 expr = build_fold_addr_expr (expr);
2974 }
2975
2976 expr = force_gimple_operand_gsi (&gsi, expr,
2977 adj->by_ref
2978 || is_gimple_reg_type (adj->type),
2979 NULL, true, GSI_SAME_STMT);
2980 vargs.quick_push (expr);
2981 }
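      /* For arguments that are not simply copied, create a DEBUG_EXPR_DECL
	 bound to the original value so that debug info for the removed or
	 modified parameter remains usable.  */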
2982 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2983 {
2984 unsigned int ix;
2985 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
2986 gimple def_temp;
2987
2988 arg = gimple_call_arg (stmt, adj->base_index);
2989 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
2990 {
2991 if (!fold_convertible_p (TREE_TYPE (origin), arg))
2992 continue;
2993 arg = fold_convert_loc (gimple_location (stmt),
2994 TREE_TYPE (origin), arg);
2995 }
2996 if (debug_args == NULL)
2997 debug_args = decl_debug_args_insert (callee_decl);
2998 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
2999 if (ddecl == origin)
3000 {
3001 ddecl = (**debug_args)[ix + 1];
3002 break;
3003 }
3004 if (ddecl == NULL)
3005 {
3006 ddecl = make_node (DEBUG_EXPR_DECL);
3007 DECL_ARTIFICIAL (ddecl) = 1;
3008 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3009 DECL_MODE (ddecl) = DECL_MODE (origin);
3010
3011 vec_safe_push (*debug_args, origin);
3012 vec_safe_push (*debug_args, ddecl);
3013 }
3014 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
3015 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3016 }
3017 }
3018
3019 if (dump_file && (dump_flags & TDF_DETAILS))
3020 {
3021 fprintf (dump_file, "replacing stmt:");
3022 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3023 }
3024
3025 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3026 vargs.release ();
3027 if (gimple_call_lhs (stmt))
3028 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3029
3030 gimple_set_block (new_stmt, gimple_block (stmt));
3031 if (gimple_has_location (stmt))
3032 gimple_set_location (new_stmt, gimple_location (stmt));
3033 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3034 gimple_call_copy_flags (new_stmt, stmt);
3035
3036 if (dump_file && (dump_flags & TDF_DETAILS))
3037 {
3038 fprintf (dump_file, "with stmt:");
3039 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3040 fprintf (dump_file, "\n");
3041 }
3042 gsi_replace (&gsi, new_stmt, true);
3043 if (cs)
3044 cgraph_set_call_stmt (cs, new_stmt);
3045 update_ssa (TODO_update_ssa);
3046 free_dominance_info (CDI_DOMINATORS);
3047 }
3048
3049 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3050
3051 static bool
3052 index_in_adjustments_multiple_times_p (int base_index,
3053 ipa_parm_adjustment_vec adjustments)
3054 {
3055 int i, len = adjustments.length ();
3056 bool one = false;
3057
3058 for (i = 0; i < len; i++)
3059 {
3060 struct ipa_parm_adjustment *adj;
3061 adj = &adjustments[i];
3062
3063 if (adj->base_index == base_index)
3064 {
3065 if (one)
3066 return true;
3067 else
3068 one = true;
3069 }
3070 }
3071 return false;
3072 }
3073
3074
3075 /* Return adjustments that should have the same effect on function parameters
3076 and call arguments as if they were first changed according to adjustments in
3077 INNER and then by adjustments in OUTER. */
3078
3079 ipa_parm_adjustment_vec
3080 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3081 ipa_parm_adjustment_vec outer)
3082 {
3083 int i, outlen = outer.length ();
3084 int inlen = inner.length ();
3085 int removals = 0;
3086 ipa_parm_adjustment_vec adjustments, tmp;
3087
3088 tmp.create (inlen);
3089 for (i = 0; i < inlen; i++)
3090 {
3091 struct ipa_parm_adjustment *n;
3092 n = &inner[i];
3093
3094 if (n->remove_param)
3095 removals++;
3096 else
3097 tmp.quick_push (*n);
3098 }
3099
3100 adjustments.create (outlen + removals);
3101 for (i = 0; i < outlen; i++)
3102 {
3103 struct ipa_parm_adjustment r;
3104 struct ipa_parm_adjustment *out = &outer[i];
3105 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3106
3107 memset (&r, 0, sizeof (r));
3108 gcc_assert (!in->remove_param);
3109 if (out->remove_param)
3110 {
3111 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3112 {
3113 r.remove_param = true;
3114 adjustments.quick_push (r);
3115 }
3116 continue;
3117 }
3118
3119 r.base_index = in->base_index;
3120 r.type = out->type;
3121
3122 /* FIXME: Create nonlocal value too. */
3123
3124 if (in->copy_param && out->copy_param)
3125 r.copy_param = true;
3126 else if (in->copy_param)
3127 r.offset = out->offset;
3128 else if (out->copy_param)
3129 r.offset = in->offset;
3130 else
3131 r.offset = in->offset + out->offset;
3132 adjustments.quick_push (r);
3133 }
3134
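  /* Parameter removals recorded in the inner adjustments do not correspond
     to any entry in OUTER, so append them to the result as well.  */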
3135 for (i = 0; i < inlen; i++)
3136 {
3137 struct ipa_parm_adjustment *n = &inner[i];
3138
3139 if (n->remove_param)
3140 adjustments.quick_push (*n);
3141 }
3142
3143 tmp.release ();
3144 return adjustments;
3145 }
3146
3147 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
3148    human-friendly way, assuming they are meant to be applied to FNDECL. */
3149
3150 void
3151 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3152 tree fndecl)
3153 {
3154 int i, len = adjustments.length ();
3155 bool first = true;
3156 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3157
3158 fprintf (file, "IPA param adjustments: ");
3159 for (i = 0; i < len; i++)
3160 {
3161 struct ipa_parm_adjustment *adj;
3162 adj = &adjustments[i];
3163
3164 if (!first)
3165 fprintf (file, " ");
3166 else
3167 first = false;
3168
3169 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3170 print_generic_expr (file, parms[adj->base_index], 0);
3171 if (adj->base)
3172 {
3173 fprintf (file, ", base: ");
3174 print_generic_expr (file, adj->base, 0);
3175 }
3176 if (adj->reduction)
3177 {
3178 fprintf (file, ", reduction: ");
3179 print_generic_expr (file, adj->reduction, 0);
3180 }
3181 if (adj->new_ssa_base)
3182 {
3183 fprintf (file, ", new_ssa_base: ");
3184 print_generic_expr (file, adj->new_ssa_base, 0);
3185 }
3186
3187 if (adj->copy_param)
3188 fprintf (file, ", copy_param");
3189 else if (adj->remove_param)
3190 fprintf (file, ", remove_param");
3191 else
3192 fprintf (file, ", offset %li", (long) adj->offset);
3193 if (adj->by_ref)
3194 fprintf (file, ", by_ref");
3195 print_node_brief (file, ", type: ", adj->type, 0);
3196 fprintf (file, "\n");
3197 }
3198 parms.release ();
3199 }
3200
3201 /* Dump the AV linked list. */
3202
3203 void
3204 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3205 {
3206 bool comma = false;
3207 fprintf (f, " Aggregate replacements:");
3208 for (; av; av = av->next)
3209 {
3210 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3211 av->index, av->offset);
3212 print_generic_expr (f, av->value, 0);
3213 comma = true;
3214 }
3215 fprintf (f, "\n");
3216 }
3217
3218 /* Stream out jump function JUMP_FUNC to OB. */
3219
3220 static void
3221 ipa_write_jump_function (struct output_block *ob,
3222 struct ipa_jump_func *jump_func)
3223 {
3224 struct ipa_agg_jf_item *item;
3225 struct bitpack_d bp;
3226 int i, count;
3227
3228 streamer_write_uhwi (ob, jump_func->type);
3229 switch (jump_func->type)
3230 {
3231 case IPA_JF_UNKNOWN:
3232 break;
3233 case IPA_JF_KNOWN_TYPE:
3234 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3235 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3236 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3237 break;
3238 case IPA_JF_CONST:
3239 gcc_assert (
3240 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3241 stream_write_tree (ob, jump_func->value.constant, true);
3242 break;
3243 case IPA_JF_PASS_THROUGH:
3244 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3245 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3246 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3247 bp = bitpack_create (ob->main_stream);
3248 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3249 streamer_write_bitpack (&bp);
3250 break;
3251 case IPA_JF_ANCESTOR:
3252 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3253 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3254 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3255 bp = bitpack_create (ob->main_stream);
3256 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3257 streamer_write_bitpack (&bp);
3258 break;
3259 }
3260
3261 count = vec_safe_length (jump_func->agg.items);
3262 streamer_write_uhwi (ob, count);
3263 if (count)
3264 {
3265 bp = bitpack_create (ob->main_stream);
3266 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3267 streamer_write_bitpack (&bp);
3268 }
3269
3270 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
3271 {
3272 streamer_write_uhwi (ob, item->offset);
3273 stream_write_tree (ob, item->value, true);
3274 }
3275 }
3276
3277 /* Read in jump function JUMP_FUNC from IB. */
3278
3279 static void
3280 ipa_read_jump_function (struct lto_input_block *ib,
3281 struct ipa_jump_func *jump_func,
3282 struct data_in *data_in)
3283 {
3284 struct bitpack_d bp;
3285 int i, count;
3286
3287 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3288 switch (jump_func->type)
3289 {
3290 case IPA_JF_UNKNOWN:
3291 break;
3292 case IPA_JF_KNOWN_TYPE:
3293 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3294 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3295 jump_func->value.known_type.component_type = stream_read_tree (ib,
3296 data_in);
3297 break;
3298 case IPA_JF_CONST:
3299 jump_func->value.constant = stream_read_tree (ib, data_in);
3300 break;
3301 case IPA_JF_PASS_THROUGH:
3302 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3303 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3304 jump_func->value.pass_through.operation
3305 = (enum tree_code) streamer_read_uhwi (ib);
3306 bp = streamer_read_bitpack (ib);
3307 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3308 break;
3309 case IPA_JF_ANCESTOR:
3310 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3311 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3312 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3313 bp = streamer_read_bitpack (ib);
3314 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3315 break;
3316 }
3317
3318 count = streamer_read_uhwi (ib);
3319 vec_alloc (jump_func->agg.items, count);
3320 if (count)
3321 {
3322 bp = streamer_read_bitpack (ib);
3323 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3324 }
3325 for (i = 0; i < count; i++)
3326 {
3327 struct ipa_agg_jf_item item;
3328 item.offset = streamer_read_uhwi (ib);
3329 item.value = stream_read_tree (ib, data_in);
3330 jump_func->agg.items->quick_push (item);
3331 }
3332 }
3333
3334 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3335 relevant to indirect inlining to OB. */
3336
3337 static void
3338 ipa_write_indirect_edge_info (struct output_block *ob,
3339 struct cgraph_edge *cs)
3340 {
3341 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3342 struct bitpack_d bp;
3343
3344 streamer_write_hwi (ob, ii->param_index);
3345 streamer_write_hwi (ob, ii->offset);
3346 bp = bitpack_create (ob->main_stream);
3347 bp_pack_value (&bp, ii->polymorphic, 1);
3348 bp_pack_value (&bp, ii->agg_contents, 1);
3349 bp_pack_value (&bp, ii->by_ref, 1);
3350 streamer_write_bitpack (&bp);
3351
3352 if (ii->polymorphic)
3353 {
3354 streamer_write_hwi (ob, ii->otr_token);
3355 stream_write_tree (ob, ii->otr_type, true);
3356 }
3357 }
3358
3359 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3360 relevant to indirect inlining from IB. */
3361
3362 static void
3363 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3364 struct data_in *data_in ATTRIBUTE_UNUSED,
3365 struct cgraph_edge *cs)
3366 {
3367 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3368 struct bitpack_d bp;
3369
3370 ii->param_index = (int) streamer_read_hwi (ib);
3371 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3372 bp = streamer_read_bitpack (ib);
3373 ii->polymorphic = bp_unpack_value (&bp, 1);
3374 ii->agg_contents = bp_unpack_value (&bp, 1);
3375 ii->by_ref = bp_unpack_value (&bp, 1);
3376 if (ii->polymorphic)
3377 {
3378 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3379 ii->otr_type = stream_read_tree (ib, data_in);
3380 }
3381 }
3382
3383 /* Stream out NODE info to OB. */
3384
3385 static void
3386 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3387 {
3388 int node_ref;
3389 lto_symtab_encoder_t encoder;
3390 struct ipa_node_params *info = IPA_NODE_REF (node);
3391 int j;
3392 struct cgraph_edge *e;
3393 struct bitpack_d bp;
3394
3395 encoder = ob->decl_state->symtab_node_encoder;
3396 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3397 streamer_write_uhwi (ob, node_ref);
3398
3399 bp = bitpack_create (ob->main_stream);
3400 gcc_assert (info->uses_analysis_done
3401 || ipa_get_param_count (info) == 0);
3402 gcc_assert (!info->node_enqueued);
3403 gcc_assert (!info->ipcp_orig_node);
3404 for (j = 0; j < ipa_get_param_count (info); j++)
3405 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3406 streamer_write_bitpack (&bp);
3407 for (e = node->callees; e; e = e->next_callee)
3408 {
3409 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3410
3411 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3412 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3413 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3414 }
3415 for (e = node->indirect_calls; e; e = e->next_callee)
3416 {
3417 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3418
3419 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3420 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3421 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3422 ipa_write_indirect_edge_info (ob, e);
3423 }
3424 }
3425
3426 /* Stream in NODE info from IB. */
3427
3428 static void
3429 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3430 struct data_in *data_in)
3431 {
3432 struct ipa_node_params *info = IPA_NODE_REF (node);
3433 int k;
3434 struct cgraph_edge *e;
3435 struct bitpack_d bp;
3436
3437 ipa_initialize_node_params (node);
3438
3439 bp = streamer_read_bitpack (ib);
3440 if (ipa_get_param_count (info) != 0)
3441 info->uses_analysis_done = true;
3442 info->node_enqueued = false;
3443 for (k = 0; k < ipa_get_param_count (info); k++)
3444 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3445 for (e = node->callees; e; e = e->next_callee)
3446 {
3447 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3448 int count = streamer_read_uhwi (ib);
3449
3450 if (!count)
3451 continue;
3452 vec_safe_grow_cleared (args->jump_functions, count);
3453
3454 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3455 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3456 }
3457 for (e = node->indirect_calls; e; e = e->next_callee)
3458 {
3459 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3460 int count = streamer_read_uhwi (ib);
3461
3462 if (count)
3463 {
3464 vec_safe_grow_cleared (args->jump_functions, count);
3465 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3466 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3467 data_in);
3468 }
3469 ipa_read_indirect_edge_info (ib, data_in, e);
3470 }
3471 }
3472
3473 /* Write jump functions of all analyzed functions in the current partition. */
3474
3475 void
3476 ipa_prop_write_jump_functions (void)
3477 {
3478 struct cgraph_node *node;
3479 struct output_block *ob;
3480 unsigned int count = 0;
3481 lto_symtab_encoder_iterator lsei;
3482 lto_symtab_encoder_t encoder;
3483
3484
3485 if (!ipa_node_params_vector.exists ())
3486 return;
3487
3488 ob = create_output_block (LTO_section_jump_functions);
3489 encoder = ob->decl_state->symtab_node_encoder;
3490 ob->cgraph_node = NULL;
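  /* Count the analyzed functions in this partition first; the count is
     streamed before the per-node data.  */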
3491 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3492 lsei_next_function_in_partition (&lsei))
3493 {
3494 node = lsei_cgraph_node (lsei);
3495 if (cgraph_function_with_gimple_body_p (node)
3496 && IPA_NODE_REF (node) != NULL)
3497 count++;
3498 }
3499
3500 streamer_write_uhwi (ob, count);
3501
3502 /* Process all of the functions. */
3503 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3504 lsei_next_function_in_partition (&lsei))
3505 {
3506 node = lsei_cgraph_node (lsei);
3507 if (cgraph_function_with_gimple_body_p (node)
3508 && IPA_NODE_REF (node) != NULL)
3509 ipa_write_node_info (ob, node);
3510 }
3511 streamer_write_char_stream (ob->main_stream, 0);
3512 produce_asm (ob, NULL);
3513 destroy_output_block (ob);
3514 }
3515
3516 /* Read section in file FILE_DATA of length LEN with data DATA. */
3517
3518 static void
3519 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3520 size_t len)
3521 {
3522 const struct lto_function_header *header =
3523 (const struct lto_function_header *) data;
3524 const int cfg_offset = sizeof (struct lto_function_header);
3525 const int main_offset = cfg_offset + header->cfg_size;
3526 const int string_offset = main_offset + header->main_size;
3527 struct data_in *data_in;
3528 struct lto_input_block ib_main;
3529 unsigned int i;
3530 unsigned int count;
3531
3532 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3533 header->main_size);
3534
3535 data_in =
3536 lto_data_in_create (file_data, (const char *) data + string_offset,
3537 header->string_size, vNULL);
3538 count = streamer_read_uhwi (&ib_main);
3539
3540 for (i = 0; i < count; i++)
3541 {
3542 unsigned int index;
3543 struct cgraph_node *node;
3544 lto_symtab_encoder_t encoder;
3545
3546 index = streamer_read_uhwi (&ib_main);
3547 encoder = file_data->symtab_node_encoder;
3548 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3549 gcc_assert (node->analyzed);
3550 ipa_read_node_info (&ib_main, node, data_in);
3551 }
3552 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3553 len);
3554 lto_data_in_delete (data_in);
3555 }
3556
3557 /* Read ipcp jump functions. */
3558
3559 void
3560 ipa_prop_read_jump_functions (void)
3561 {
3562 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3563 struct lto_file_decl_data *file_data;
3564 unsigned int j = 0;
3565
3566 ipa_check_create_node_params ();
3567 ipa_check_create_edge_args ();
3568 ipa_register_cgraph_hooks ();
3569
3570 while ((file_data = file_data_vec[j++]))
3571 {
3572 size_t len;
3573 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3574
3575 if (data)
3576 ipa_prop_read_section (file_data, data, len);
3577 }
3578 }
3579
3580 /* After merging units, we can get a mismatch in argument counts.
3581    Also, decl merging might have rendered parameter lists obsolete.
3582 Also compute called_with_variable_arg info. */
3583
3584 void
3585 ipa_update_after_lto_read (void)
3586 {
3587 struct cgraph_node *node;
3588
3589 ipa_check_create_node_params ();
3590 ipa_check_create_edge_args ();
3591
3592 FOR_EACH_DEFINED_FUNCTION (node)
3593 if (node->analyzed)
3594 ipa_initialize_node_params (node);
3595 }
3596
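/* Stream out the aggregate value replacement chain for NODE to OB.  */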
3597 void
3598 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
3599 {
3600 int node_ref;
3601 unsigned int count = 0;
3602 lto_symtab_encoder_t encoder;
3603 struct ipa_agg_replacement_value *aggvals, *av;
3604
3605 aggvals = ipa_get_agg_replacements_for_node (node);
3606 encoder = ob->decl_state->symtab_node_encoder;
3607 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3608 streamer_write_uhwi (ob, node_ref);
3609
3610 for (av = aggvals; av; av = av->next)
3611 count++;
3612 streamer_write_uhwi (ob, count);
3613
3614 for (av = aggvals; av; av = av->next)
3615 {
3616 streamer_write_uhwi (ob, av->offset);
3617 streamer_write_uhwi (ob, av->index);
3618 stream_write_tree (ob, av->value, true);
3619 }
3620 }
3621
3622 /* Stream in the aggregate value replacement chain for NODE from IB. */
3623
3624 static void
3625 read_agg_replacement_chain (struct lto_input_block *ib,
3626 struct cgraph_node *node,
3627 struct data_in *data_in)
3628 {
3629 struct ipa_agg_replacement_value *aggvals = NULL;
3630 unsigned int count, i;
3631
3632 count = streamer_read_uhwi (ib);
3633 for (i = 0; i < count; i++)
3634 {
3635 struct ipa_agg_replacement_value *av;
3636
3637 av = ggc_alloc_ipa_agg_replacement_value ();
3638 av->offset = streamer_read_uhwi (ib);
3639 av->index = streamer_read_uhwi (ib);
3640 av->value = stream_read_tree (ib, data_in);
3641 av->next = aggvals;
3642 aggvals = av;
3643 }
3644 ipa_set_node_agg_value_chain (node, aggvals);
3645 }
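
/* Note that because each value read above is pushed onto the head of the
   chain, the in-memory list ends up in the reverse of the streamed order.
   That is harmless here: the consumers in this file (the lookup loop in
   ipcp_transform_function and adjust_agg_replacement_values) scan the whole
   chain and do not depend on its order.  */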
3646
3647 /* Write all aggregate replacements for nodes in the current partition.  */
3648
3649 void
3650 ipa_prop_write_all_agg_replacement (void)
3651 {
3652 struct cgraph_node *node;
3653 struct output_block *ob;
3654 unsigned int count = 0;
3655 lto_symtab_encoder_iterator lsei;
3656 lto_symtab_encoder_t encoder;
3657
3658 if (!ipa_node_agg_replacements)
3659 return;
3660
3661 ob = create_output_block (LTO_section_ipcp_transform);
3662 encoder = ob->decl_state->symtab_node_encoder;
3663 ob->cgraph_node = NULL;
3664 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3665 lsei_next_function_in_partition (&lsei))
3666 {
3667 node = lsei_cgraph_node (lsei);
3668 if (cgraph_function_with_gimple_body_p (node)
3669 && ipa_get_agg_replacements_for_node (node) != NULL)
3670 count++;
3671 }
3672
3673 streamer_write_uhwi (ob, count);
3674
3675 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3676 lsei_next_function_in_partition (&lsei))
3677 {
3678 node = lsei_cgraph_node (lsei);
3679 if (cgraph_function_with_gimple_body_p (node)
3680 && ipa_get_agg_replacements_for_node (node) != NULL)
3681 write_agg_replacement_chain (ob, node);
3682 }
3683 streamer_write_char_stream (ob->main_stream, 0);
3684 produce_asm (ob, NULL);
3685 destroy_output_block (ob);
3686 }
3687
3688 /* Read the aggregate replacements section in file FILE_DATA of length LEN
3689    with data DATA.  */
3690
3691 static void
3692 read_replacements_section (struct lto_file_decl_data *file_data,
3693 const char *data,
3694 size_t len)
3695 {
3696 const struct lto_function_header *header =
3697 (const struct lto_function_header *) data;
3698 const int cfg_offset = sizeof (struct lto_function_header);
3699 const int main_offset = cfg_offset + header->cfg_size;
3700 const int string_offset = main_offset + header->main_size;
3701 struct data_in *data_in;
3702 struct lto_input_block ib_main;
3703 unsigned int i;
3704 unsigned int count;
3705
3706 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3707 header->main_size);
3708
3709 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
3710 header->string_size, vNULL);
3711 count = streamer_read_uhwi (&ib_main);
3712
3713 for (i = 0; i < count; i++)
3714 {
3715 unsigned int index;
3716 struct cgraph_node *node;
3717 lto_symtab_encoder_t encoder;
3718
3719 index = streamer_read_uhwi (&ib_main);
3720 encoder = file_data->symtab_node_encoder;
3721 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3722 gcc_assert (node->analyzed);
3723 read_agg_replacement_chain (&ib_main, node, data_in);
3724 }
3725 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
3726 len);
3727 lto_data_in_delete (data_in);
3728 }
3729
3730 /* Read IPA-CP aggregate replacements. */
3731
3732 void
3733 ipa_prop_read_all_agg_replacement (void)
3734 {
3735 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3736 struct lto_file_decl_data *file_data;
3737 unsigned int j = 0;
3738
3739 while ((file_data = file_data_vec[j++]))
3740 {
3741 size_t len;
3742 const char *data = lto_get_section_data (file_data,
3743 LTO_section_ipcp_transform,
3744 NULL, &len);
3745 if (data)
3746 read_replacements_section (file_data, data, len);
3747 }
3748 }
3749
3750 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
3751 NODE. */
3752
3753 static void
3754 adjust_agg_replacement_values (struct cgraph_node *node,
3755 struct ipa_agg_replacement_value *aggval)
3756 {
3757 struct ipa_agg_replacement_value *v;
3758 int i, c = 0, d = 0, *adj;
3759
3760 if (!node->clone.combined_args_to_skip)
3761 return;
3762
3763 for (v = aggval; v; v = v->next)
3764 {
3765 gcc_assert (v->index >= 0);
3766 if (c < v->index)
3767 c = v->index;
3768 }
3769 c++;
3770
3771 adj = XALLOCAVEC (int, c);
3772 for (i = 0; i < c; i++)
3773 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
3774 {
3775 adj[i] = -1;
3776 d++;
3777 }
3778 else
3779 adj[i] = i - d;
3780
3781 for (v = aggval; v; v = v->next)
3782 v->index = adj[v->index];
3783 }
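
/* A hypothetical example of the adjustment above: if the clone was created
   with combined_args_to_skip = { 1 } and AGGVAL holds values for original
   parameter indices 0, 2 and 3, then c = 4 and the table becomes

       adj[0] = 0,  adj[1] = -1,  adj[2] = 1,  adj[3] = 2

   so the surviving values are renumbered to 0, 1 and 2, matching the
   parameter positions of the clone after the skipped argument is removed.  */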
3784
3785
3786 /* Function body transformation phase: apply the aggregate constant replacements discovered by IPA-CP.  */
3787
3788 unsigned int
3789 ipcp_transform_function (struct cgraph_node *node)
3790 {
3791 vec<ipa_param_descriptor_t> descriptors = vNULL;
3792 struct param_analysis_info *parms_ainfo;
3793 struct ipa_agg_replacement_value *aggval;
3794 gimple_stmt_iterator gsi;
3795 basic_block bb;
3796 int param_count;
3797 bool cfg_changed = false, something_changed = false;
3798
3799 gcc_checking_assert (cfun);
3800 gcc_checking_assert (current_function_decl);
3801
3802 if (dump_file)
3803 fprintf (dump_file, "Modification phase of node %s/%i\n",
3804 cgraph_node_name (node), node->uid);
3805
3806 aggval = ipa_get_agg_replacements_for_node (node);
3807 if (!aggval)
3808 return 0;
3809 param_count = count_formal_params (node->symbol.decl);
3810 if (param_count == 0)
3811 return 0;
3812 adjust_agg_replacement_values (node, aggval);
3813 if (dump_file)
3814 ipa_dump_agg_replacement_values (dump_file, aggval);
3815 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
3816 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
3817 descriptors.safe_grow_cleared (param_count);
3818 ipa_populate_param_decls (node, descriptors);
3819
3820 FOR_EACH_BB (bb)
3821 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3822 {
3823 struct ipa_agg_replacement_value *v;
3824 gimple stmt = gsi_stmt (gsi);
3825 tree rhs, val, t;
3826 HOST_WIDE_INT offset;
3827 int index;
3828 bool by_ref, vce;
3829
3830 if (!gimple_assign_load_p (stmt))
3831 continue;
3832 rhs = gimple_assign_rhs1 (stmt);
3833 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
3834 continue;
3835
3836 vce = false;
3837 t = rhs;
3838 while (handled_component_p (t))
3839 {
3840 /* V_C_E can do things like convert an array of integers to one
3841 bigger integer and similar things we do not handle below. */
3842 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
3843 {
3844 vce = true;
3845 break;
3846 }
3847 t = TREE_OPERAND (t, 0);
3848 }
3849 if (vce)
3850 continue;
3851
3852 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3853 rhs, &index, &offset, &by_ref))
3854 continue;
3855 for (v = aggval; v; v = v->next)
3856 if (v->index == index
3857 && v->offset == offset)
3858 break;
3859 if (!v)
3860 continue;
3861
3862 gcc_checking_assert (is_gimple_ip_invariant (v->value));
3863 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
3864 {
3865 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
3866 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
3867 else if (TYPE_SIZE (TREE_TYPE (rhs))
3868 == TYPE_SIZE (TREE_TYPE (v->value)))
3869 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
3870 else
3871 {
3872 if (dump_file)
3873 {
3874 fprintf (dump_file, " const ");
3875 print_generic_expr (dump_file, v->value, 0);
3876 fprintf (dump_file, " can't be converted to type of ");
3877 print_generic_expr (dump_file, rhs, 0);
3878 fprintf (dump_file, "\n");
3879 }
3880 continue;
3881 }
3882 }
3883 else
3884 val = v->value;
3885
3886 if (dump_file && (dump_flags & TDF_DETAILS))
3887 {
3888 fprintf (dump_file, "Modifying stmt:\n ");
3889 print_gimple_stmt (dump_file, stmt, 0, 0);
3890 }
3891 gimple_assign_set_rhs_from_tree (&gsi, val);
3892 update_stmt (stmt);
3893
3894 if (dump_file && (dump_flags & TDF_DETAILS))
3895 {
3896 fprintf (dump_file, "into:\n ");
3897 print_gimple_stmt (dump_file, stmt, 0, 0);
3898 fprintf (dump_file, "\n");
3899 }
3900
3901 something_changed = true;
3902 if (maybe_clean_eh_stmt (stmt)
3903 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3904 cfg_changed = true;
3905 }
3906
3907 (*ipa_node_agg_replacements)[node->uid] = NULL;
3908 free_parms_ainfo (parms_ainfo, param_count);
3909 descriptors.release ();
3910
3911 if (!something_changed)
3912 return 0;
3913 else if (cfg_changed)
3914 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
3915 else
3916 return TODO_update_ssa_only_virtuals;
3917 }
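
/* A sketch of the effect of the loop above, on hypothetical GIMPLE rather
   than an actual testcase: if IPA-CP recorded that in this clone the
   aggregate reachable from parameter p always contains 42 at the offset of
   field f, then a load

       tmp_1 = p_2(D)->f;

   whose parameter index and offset match that record is rewritten to

       tmp_1 = 42;

   after which the statement is updated and, if it can no longer throw, its
   now-dead EH edges are purged, which is what may set cfg_changed.  */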