re PR tree-optimization/53787 (Possible IPA-SRA / IPA-CP improvement)
[gcc.git] gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "ipa-inline.h"
34 #include "gimple.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "lto-streamer.h"
39 #include "data-streamer.h"
40 #include "tree-streamer.h"
41 #include "params.h"
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
45
46 struct param_analysis_info
47 {
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
50 };
51
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector of known aggregate values in cloned nodes. */
55 VEC (ipa_agg_replacement_value_p, gc) *ipa_node_agg_replacements;
56 /* Vector where the edge argument infos are actually stored. */
57 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
58
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
65
66 /* Return index of the formal whose tree is PTREE in the function whose
67 parameters are described by DESCRIPTORS. */
68
69 static int
70 ipa_get_param_decl_index_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
71 tree ptree)
72 {
73 int i, count;
74
75 count = VEC_length (ipa_param_descriptor_t, descriptors);
76 for (i = 0; i < count; i++)
77 if (VEC_index (ipa_param_descriptor_t, descriptors, i).decl == ptree)
78 return i;
79
80 return -1;
81 }
82
83 /* Return index of the formal whose tree is PTREE in the function which
84 corresponds to INFO. */
85
86 int
87 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
88 {
89 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
90 }
91
92 /* Populate the decl field in parameter DESCRIPTORS that correspond to
93 NODE. */
94
95 static void
96 ipa_populate_param_decls (struct cgraph_node *node,
97 VEC (ipa_param_descriptor_t, heap) *descriptors)
98 {
99 tree fndecl;
100 tree fnargs;
101 tree parm;
102 int param_num;
103
104 fndecl = node->symbol.decl;
105 fnargs = DECL_ARGUMENTS (fndecl);
106 param_num = 0;
107 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
108 {
109 VEC_index (ipa_param_descriptor_t, descriptors, param_num).decl = parm;
110 param_num++;
111 }
112 }
113
114 /* Return how many formal parameters FNDECL has. */
115
116 static inline int
117 count_formal_params (tree fndecl)
118 {
119 tree parm;
120 int count = 0;
121
122 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
123 count++;
124
125 return count;
126 }
127
128 /* Initialize the ipa_node_params structure associated with NODE by counting
129 the function parameters, creating the descriptors and populating their
130 param_decls. */
131
132 void
133 ipa_initialize_node_params (struct cgraph_node *node)
134 {
135 struct ipa_node_params *info = IPA_NODE_REF (node);
136
137 if (!info->descriptors)
138 {
139 int param_count;
140
141 param_count = count_formal_params (node->symbol.decl);
142 if (param_count)
143 {
144 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
145 info->descriptors, param_count);
146 ipa_populate_param_decls (node, info->descriptors);
147 }
148 }
149 }
150
151 /* Print the jump functions associated with call graph edge CS to file F. */
152
153 static void
154 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
155 {
156 int i, count;
157
158 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
159 for (i = 0; i < count; i++)
160 {
161 struct ipa_jump_func *jump_func;
162 enum jump_func_type type;
163
164 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
165 type = jump_func->type;
166
167 fprintf (f, " param %d: ", i);
168 if (type == IPA_JF_UNKNOWN)
169 fprintf (f, "UNKNOWN\n");
170 else if (type == IPA_JF_KNOWN_TYPE)
171 {
172 fprintf (f, "KNOWN TYPE: base ");
173 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
174 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
175 jump_func->value.known_type.offset);
176 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
177 fprintf (f, "\n");
178 }
179 else if (type == IPA_JF_CONST)
180 {
181 tree val = jump_func->value.constant;
182 fprintf (f, "CONST: ");
183 print_generic_expr (f, val, 0);
184 if (TREE_CODE (val) == ADDR_EXPR
185 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
186 {
187 fprintf (f, " -> ");
188 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
189 0);
190 }
191 fprintf (f, "\n");
192 }
193 else if (type == IPA_JF_PASS_THROUGH)
194 {
195 fprintf (f, "PASS THROUGH: ");
196 fprintf (f, "%d, op %s",
197 jump_func->value.pass_through.formal_id,
198 tree_code_name[(int)
199 jump_func->value.pass_through.operation]);
200 if (jump_func->value.pass_through.operation != NOP_EXPR)
201 {
202 fprintf (f, " ");
203 print_generic_expr (f,
204 jump_func->value.pass_through.operand, 0);
205 }
206 if (jump_func->value.pass_through.agg_preserved)
207 fprintf (f, ", agg_preserved");
208 fprintf (f, "\n");
209 }
210 else if (type == IPA_JF_ANCESTOR)
211 {
212 fprintf (f, "ANCESTOR: ");
213 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
214 jump_func->value.ancestor.formal_id,
215 jump_func->value.ancestor.offset);
216 print_generic_expr (f, jump_func->value.ancestor.type, 0);
217 if (jump_func->value.ancestor.agg_preserved)
218 fprintf (f, ", agg_preserved");
219 fprintf (f, "\n");
220 }
221
222 if (jump_func->agg.items)
223 {
224 struct ipa_agg_jf_item *item;
225 int j;
226
227 fprintf (f, " Aggregate passed by %s:\n",
228 jump_func->agg.by_ref ? "reference" : "value");
229 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
230 j, item)
231 {
232 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
233 item->offset);
234 if (TYPE_P (item->value))
235 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
236 tree_low_cst (TYPE_SIZE (item->value), 1));
237 else
238 {
239 fprintf (f, "cst: ");
240 print_generic_expr (f, item->value, 0);
241 }
242 fprintf (f, "\n");
243 }
244 }
245 }
246 }
247
248
249 /* Print the jump functions of all arguments on all call graph edges going from
250 NODE to file F. */
251
252 void
253 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
254 {
255 struct cgraph_edge *cs;
256 int i;
257
258 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
259 for (cs = node->callees; cs; cs = cs->next_callee)
260 {
261 if (!ipa_edge_args_info_available_for_edge_p (cs))
262 continue;
263
264 fprintf (f, " callsite %s/%i -> %s/%i : \n",
265 xstrdup (cgraph_node_name (node)), node->uid,
266 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
267 ipa_print_node_jump_functions_for_edge (f, cs);
268 }
269
270 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
271 {
272 if (!ipa_edge_args_info_available_for_edge_p (cs))
273 continue;
274
275 if (cs->call_stmt)
276 {
277 fprintf (f, " indirect callsite %d for stmt ", i);
278 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
279 }
280 else
281 fprintf (f, " indirect callsite %d :\n", i);
282 ipa_print_node_jump_functions_for_edge (f, cs);
283
284 }
285 }
286
287 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
288
289 void
290 ipa_print_all_jump_functions (FILE *f)
291 {
292 struct cgraph_node *node;
293
294 fprintf (f, "\nJump functions:\n");
295 FOR_EACH_FUNCTION (node)
296 {
297 ipa_print_node_jump_functions (f, node);
298 }
299 }
300
301 /* Worker for prune_expression_for_jf. */
302
303 static tree
304 prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
305 {
306 if (EXPR_P (*tp))
307 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
308 else
309 *walk_subtrees = 0;
310 return NULL_TREE;
311 }
312
313 /* Return the expression tree EXPR unshared and with location stripped off. */
314
315 static tree
316 prune_expression_for_jf (tree exp)
317 {
318 if (EXPR_P (exp))
319 {
320 exp = unshare_expr (exp);
321 walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
322 }
323 return exp;
324 }
325
326 /* Set JFUNC to be a known type jump function. */
327
328 static void
329 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
330 tree base_type, tree component_type)
331 {
332 jfunc->type = IPA_JF_KNOWN_TYPE;
333 jfunc->value.known_type.offset = offset;
334 jfunc->value.known_type.base_type = base_type;
335 jfunc->value.known_type.component_type = component_type;
336 }
337
338 /* Set JFUNC to be a constant jump function. */
339
340 static void
341 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
342 {
343 constant = unshare_expr (constant);
344 if (constant && EXPR_P (constant))
345 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
346 jfunc->type = IPA_JF_CONST;
347 jfunc->value.constant = prune_expression_for_jf (constant);
348 }
349
350 /* Set JFUNC to be a simple pass-through jump function. */
351 static void
352 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
353 bool agg_preserved)
354 {
355 jfunc->type = IPA_JF_PASS_THROUGH;
356 jfunc->value.pass_through.operand = NULL_TREE;
357 jfunc->value.pass_through.formal_id = formal_id;
358 jfunc->value.pass_through.operation = NOP_EXPR;
359 jfunc->value.pass_through.agg_preserved = agg_preserved;
360 }
361
362 /* Set JFUNC to be an arithmetic pass through jump function. */
363
364 static void
365 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
366 tree operand, enum tree_code operation)
367 {
368 jfunc->type = IPA_JF_PASS_THROUGH;
369 jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
370 jfunc->value.pass_through.formal_id = formal_id;
371 jfunc->value.pass_through.operation = operation;
372 jfunc->value.pass_through.agg_preserved = false;
373 }
374
375 /* Set JFUNC to be an ancestor jump function. */
376
377 static void
378 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
379 tree type, int formal_id, bool agg_preserved)
380 {
381 jfunc->type = IPA_JF_ANCESTOR;
382 jfunc->value.ancestor.formal_id = formal_id;
383 jfunc->value.ancestor.offset = offset;
384 jfunc->value.ancestor.type = type;
385 jfunc->value.ancestor.agg_preserved = agg_preserved;
386 }
387
388 /* Structure to be passed in between detect_type_change and
389 check_stmt_for_type_change. */
390
391 struct type_change_info
392 {
393 /* Offset into the object where there is the virtual method pointer we are
394 looking for. */
395 HOST_WIDE_INT offset;
396 /* The declaration or SSA_NAME pointer of the base that we are checking for
397 type change. */
398 tree object;
399 /* If we actually can tell the type that the object has changed to, it is
400 stored in this field. Otherwise it remains NULL_TREE. */
401 tree known_current_type;
402 /* Set to true if dynamic type change has been detected. */
403 bool type_maybe_changed;
404 /* Set to true if multiple types have been encountered. known_current_type
405 must be disregarded in that case. */
406 bool multiple_types_encountered;
407 };
408
409 /* Return true if STMT can modify a virtual method table pointer.
410
411 This function makes special assumptions about both constructors and
412 destructors which are all the functions that are allowed to alter the VMT
413 pointers. It assumes that destructors begin with assignment into all VMT
414 pointers and that constructors essentially look in the following way:
415
416 1) The very first thing they do is that they call constructors of ancestor
417 sub-objects that have them.
418
419 2) Then the VMT pointers of this and all its ancestor sub-objects are set
420 to new values corresponding to the type to which the constructor belongs.
421
422 3) Only afterwards is other work done, such as running constructors of
423 member sub-objects and the code written by the user. Only this part may
424 include calling virtual functions, directly or indirectly.
425
426 There is no way to call a constructor of an ancestor sub-object in any
427 other way.
428
429 This means that we do not have to care whether constructors get the correct
430 type information because they will always change it (in fact, if we define
431 the type to be given by the VMT pointer, it is undefined).
432
433 The most important fact to derive from the above is that if, for some
434 statement in section 3, we try to detect whether the dynamic type has
435 changed, we can safely ignore all calls as we examine the function body
436 backwards until we reach statements in section 2 because these calls cannot
437 be ancestor constructors or destructors (if the input is not bogus) and so
438 do not change the dynamic type (this holds true only for automatically
439 allocated objects but at the moment we devirtualize only these). We then
440 must detect that statements in section 2 change the dynamic type and can try
441 to derive the new type. That is enough and we can stop, we will never see
442 the calls into constructors of sub-objects in this code. Therefore we can
443 safely ignore all call statements that we traverse.
444 */
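/* As an illustration of the assumptions above (a simplified, assumed
   example, not taken from this file), a constructor of

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   is emitted roughly as

     B::B (struct B * const this)
     {
       A::A (this);                  <- 1) ancestor constructors
       this->_vptr.A = &_ZTV1B[2];   <- 2) VMT pointers are stored
       ...user code...               <- 3) may call virtual methods
     }

   so a backward walk that starts at a statement in section 3 may safely
   skip the call in section 1.  */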
445
446 static bool
447 stmt_may_be_vtbl_ptr_store (gimple stmt)
448 {
449 if (is_gimple_call (stmt))
450 return false;
451 else if (is_gimple_assign (stmt))
452 {
453 tree lhs = gimple_assign_lhs (stmt);
454
455 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
456 {
457 if (flag_strict_aliasing
458 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
459 return false;
460
461 if (TREE_CODE (lhs) == COMPONENT_REF
462 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
463 return false;
464 /* In the future we might want to use get_base_ref_and_offset to find
465 if there is a field corresponding to the offset and if so, proceed
466 almost like if it was a component ref. */
467 }
468 }
469 return true;
470 }
471
472 /* If STMT can be proved to be an assignment to the virtual method table
473 pointer of the object described by TCI and the type associated with the new
474 table identified, return the type. Otherwise return NULL_TREE. */
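/* The matched statements look roughly like this assumed example:

     this_2(D)->_vptr.A = &_ZTV1B[2];

   i.e. a store of the address of a virtual table VAR_DECL into a
   DECL_VIRTUAL_P field.  DECL_CONTEXT of the table declaration then
   identifies the type the object is being given.  */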
475
476 static tree
477 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
478 {
479 HOST_WIDE_INT offset, size, max_size;
480 tree lhs, rhs, base;
481
482 if (!gimple_assign_single_p (stmt))
483 return NULL_TREE;
484
485 lhs = gimple_assign_lhs (stmt);
486 rhs = gimple_assign_rhs1 (stmt);
487 if (TREE_CODE (lhs) != COMPONENT_REF
488 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
489 || TREE_CODE (rhs) != ADDR_EXPR)
490 return NULL_TREE;
491 rhs = get_base_address (TREE_OPERAND (rhs, 0));
492 if (!rhs
493 || TREE_CODE (rhs) != VAR_DECL
494 || !DECL_VIRTUAL_P (rhs))
495 return NULL_TREE;
496
497 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
498 if (offset != tci->offset
499 || size != POINTER_SIZE
500 || max_size != POINTER_SIZE)
501 return NULL_TREE;
502 if (TREE_CODE (base) == MEM_REF)
503 {
504 if (TREE_CODE (tci->object) != MEM_REF
505 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
506 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
507 TREE_OPERAND (base, 1)))
508 return NULL_TREE;
509 }
510 else if (tci->object != base)
511 return NULL_TREE;
512
513 return DECL_CONTEXT (rhs);
514 }
515
516 /* Callback of walk_aliased_vdefs and a helper function for
517 detect_type_change to check whether a particular statement may modify
518 the virtual table pointer, and if possible also determine the new type of
519 the (sub-)object. It stores its result into DATA, which points to a
520 type_change_info structure. */
521
522 static bool
523 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
524 {
525 gimple stmt = SSA_NAME_DEF_STMT (vdef);
526 struct type_change_info *tci = (struct type_change_info *) data;
527
528 if (stmt_may_be_vtbl_ptr_store (stmt))
529 {
530 tree type;
531 type = extr_type_from_vtbl_ptr_store (stmt, tci);
532 if (tci->type_maybe_changed
533 && type != tci->known_current_type)
534 tci->multiple_types_encountered = true;
535 tci->known_current_type = type;
536 tci->type_maybe_changed = true;
537 return true;
538 }
539 else
540 return false;
541 }
542
543
544
545 /* Like detect_type_change but with an extra argument COMP_TYPE which will
546 become the component type part of the new JFUNC if a dynamic type change is
547 detected and the new base type is identified. */
548
549 static bool
550 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
551 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
552 {
553 struct type_change_info tci;
554 ao_ref ao;
555
556 gcc_checking_assert (DECL_P (arg)
557 || TREE_CODE (arg) == MEM_REF
558 || handled_component_p (arg));
559 /* Const calls cannot call virtual methods through VMT and so type changes do
560 not matter. */
561 if (!flag_devirtualize || !gimple_vuse (call))
562 return false;
563
564 ao_ref_init (&ao, arg);
565 ao.base = base;
566 ao.offset = offset;
567 ao.size = POINTER_SIZE;
568 ao.max_size = ao.size;
569
570 tci.offset = offset;
571 tci.object = get_base_address (arg);
572 tci.known_current_type = NULL_TREE;
573 tci.type_maybe_changed = false;
574 tci.multiple_types_encountered = false;
575
576 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
577 &tci, NULL);
578 if (!tci.type_maybe_changed)
579 return false;
580
581 if (!tci.known_current_type
582 || tci.multiple_types_encountered
583 || offset != 0)
584 jfunc->type = IPA_JF_UNKNOWN;
585 else
586 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
587
588 return true;
589 }
590
591 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
592 looking for assignments to its virtual table pointer. If it has, return true
593 and fill in the jump function JFUNC with relevant type information or set it
594 to unknown. ARG is the object itself (not a pointer to it, unless
595 dereferenced). BASE is the base of the memory access as returned by
596 get_ref_base_and_extent, as is the offset. */
597
598 static bool
599 detect_type_change (tree arg, tree base, gimple call,
600 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
601 {
602 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
603 }
604
605 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
606 SSA name (its dereference will become the base and the offset is assumed to
607 be zero). */
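/* For instance (an assumed sketch), for a pointer argument p_1(D) of type
   struct S *, the function builds the dereference MEM[p_1(D) + 0] and asks
   detect_type_change_1 whether a virtual table pointer of *p_1 may have
   been stored into between the beginning of the function and CALL.  */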
608
609 static bool
610 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
611 {
612 tree comp_type;
613
614 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
615 if (!flag_devirtualize
616 || !POINTER_TYPE_P (TREE_TYPE (arg))
617 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
618 return false;
619
620 comp_type = TREE_TYPE (TREE_TYPE (arg));
621 arg = build2 (MEM_REF, ptr_type_node, arg,
622 build_int_cst (ptr_type_node, 0));
623
624 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
625 }
626
627 /* Callback of walk_aliased_vdefs. Records that it has been invoked by
628 setting to true the boolean variable pointed to by DATA. */
629
630 static bool
631 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
632 void *data)
633 {
634 bool *b = (bool *) data;
635 *b = true;
636 return true;
637 }
638
639 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
640 a value known not to be modified in this function before reaching the
641 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
642 information about the parameter. */
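/* An assumed illustration with an addressable parameter a:

     foo (int a)
     {
       a.0_1 = a;        <- preserved, no aliasing store before it
       modify (&a);      <- may clobber a
       a.1_2 = a;        <- no longer known to be preserved
     }
*/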
643
644 static bool
645 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
646 gimple stmt, tree parm_load)
647 {
648 bool modified = false;
649 bitmap *visited_stmts;
650 ao_ref refd;
651
652 if (parm_ainfo && parm_ainfo->parm_modified)
653 return false;
654
655 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
656 ao_ref_init (&refd, parm_load);
657 /* We can cache visited statements only when parm_ainfo is available and when
658 we are looking at a naked load of the whole parameter. */
659 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
660 visited_stmts = NULL;
661 else
662 visited_stmts = &parm_ainfo->parm_visited_statements;
663 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
664 visited_stmts);
665 if (parm_ainfo && modified)
666 parm_ainfo->parm_modified = true;
667 return !modified;
668 }
669
670 /* If STMT is an assignment that loads a value from a parameter declaration,
671 return the index of the parameter in ipa_node_params which has not been
672 modified. Otherwise return -1. */
673
674 static int
675 load_from_unmodified_param (VEC (ipa_param_descriptor_t, heap) *descriptors,
676 struct param_analysis_info *parms_ainfo,
677 gimple stmt)
678 {
679 int index;
680 tree op1;
681
682 if (!gimple_assign_single_p (stmt))
683 return -1;
684
685 op1 = gimple_assign_rhs1 (stmt);
686 if (TREE_CODE (op1) != PARM_DECL)
687 return -1;
688
689 index = ipa_get_param_decl_index_1 (descriptors, op1);
690 if (index < 0
691 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
692 : NULL, stmt, op1))
693 return -1;
694
695 return index;
696 }
697
698 /* Return true if memory reference REF loads data that are known to be
699 unmodified in this function before reaching statement STMT. PARM_AINFO, if
700 non-NULL, is a pointer to a structure containing temporary information about
701 the parameter. */
702
703 static bool
704 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
705 gimple stmt, tree ref)
706 {
707 bool modified = false;
708 ao_ref refd;
709
710 gcc_checking_assert (gimple_vuse (stmt));
711 if (parm_ainfo && parm_ainfo->ref_modified)
712 return false;
713
714 ao_ref_init (&refd, ref);
715 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
716 NULL);
717 if (parm_ainfo && modified)
718 parm_ainfo->ref_modified = true;
719 return !modified;
720 }
721
722 /* Return true if the data pointed to by PARM is known to be unmodified in this
723 function before reaching call statement CALL into which it is passed.
724 PARM_AINFO is a pointer to a structure containing temporary information
725 about PARM. */
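/* E.g. (an assumed sketch) in

     bar (struct S *p)
     {
       use_s (p_1(D));
     }

   nothing is stored into *p between the beginning of bar and the call, so
   the aggregate contents pointed to by p pass through to use_s.  */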
726
727 static bool
728 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
729 gimple call, tree parm)
730 {
731 bool modified = false;
732 ao_ref refd;
733
734 /* It's unnecessary to calculate anything about memory contents for a const
735 function because it is not going to use it. But do not cache the result
736 either. Also, no such calculations for non-pointers. */
737 if (!gimple_vuse (call)
738 || !POINTER_TYPE_P (TREE_TYPE (parm)))
739 return false;
740
741 if (parm_ainfo->pt_modified)
742 return false;
743
744 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
745 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
746 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
747 if (modified)
748 parm_ainfo->pt_modified = true;
749 return !modified;
750 }
751
752 /* Return true if we can prove that OP is a memory reference loading unmodified
753 data from an aggregate passed as a parameter and if the aggregate is passed
754 by reference, that the alias type of the load corresponds to the type of the
755 formal parameter (so that we can rely on this type for TBAA in callers).
756 INFO and PARMS_AINFO describe parameters of the current function (but the
757 latter can be NULL), STMT is the load statement. If the function returns true,
758 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
759 within the aggregate and whether it is a load from a value passed by
760 reference respectively. */
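/* For example (assumed), given a function with a first parameter
   struct S *p and the load

     tmp_1 = p_2(D)->fld;

   *INDEX_P would be set to 0, *OFFSET_P to the bit offset of fld within S
   and *BY_REF_P to true.  */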
761
762 static bool
763 ipa_load_from_parm_agg_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
764 struct param_analysis_info *parms_ainfo, gimple stmt,
765 tree op, int *index_p, HOST_WIDE_INT *offset_p,
766 bool *by_ref_p)
767 {
768 int index;
769 HOST_WIDE_INT size, max_size;
770 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
771
772 if (max_size == -1 || max_size != size || *offset_p < 0)
773 return false;
774
775 if (DECL_P (base))
776 {
777 int index = ipa_get_param_decl_index_1 (descriptors, base);
778 if (index >= 0
779 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
780 : NULL, stmt, op))
781 {
782 *index_p = index;
783 *by_ref_p = false;
784 return true;
785 }
786 return false;
787 }
788
789 if (TREE_CODE (base) != MEM_REF
790 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
791 || !integer_zerop (TREE_OPERAND (base, 1)))
792 return false;
793
794 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
795 {
796 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
797 index = ipa_get_param_decl_index_1 (descriptors, parm);
798 }
799 else
800 {
801 /* This branch catches situations where a pointer parameter is not a
802 gimple register, for example:
803
804 void hip7(S*) (struct S * p)
805 {
806 void (*<T2e4>) (struct S *) D.1867;
807 struct S * p.1;
808
809 <bb 2>:
810 p.1_1 = p;
811 D.1867_2 = p.1_1->f;
812 D.1867_2 ();
813 gdp = &p;
814 */
815
816 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
817 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
818 }
819
820 if (index >= 0
821 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
822 stmt, op))
823 {
824 *index_p = index;
825 *by_ref_p = true;
826 return true;
827 }
828 return false;
829 }
830
831 /* Just like the previous function, but without the param_analysis_info
832 pointer, for users outside of this file. */
833
834 bool
835 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
836 tree op, int *index_p, HOST_WIDE_INT *offset_p,
837 bool *by_ref_p)
838 {
839 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
840 offset_p, by_ref_p);
841 }
842
843 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
844 of an assignment statement STMT, try to determine whether we are actually
845 handling any of the following cases and construct an appropriate jump
846 function into JFUNC if so:
847
848 1) The passed value is loaded from a formal parameter which is not a gimple
849 register (most probably because it is addressable, though the value itself
850 has to be scalar) and we can guarantee the value has not changed. This case can
851 therefore be described by a simple pass-through jump function. For example:
852
853 foo (int a)
854 {
855 int a.0;
856
857 a.0_2 = a;
858 bar (a.0_2);
859
860 2) The passed value can be described by a simple arithmetic pass-through
861 jump function. E.g.
862
863 foo (int a)
864 {
865 int D.2064;
866
867 D.2064_4 = a.1(D) + 4;
868 bar (D.2064_4);
869
870 This case can also occur in combination with the previous one, e.g.:
871
872 foo (int a, int z)
873 {
874 int a.0;
875 int D.2064;
876
877 a.0_3 = a;
878 D.2064_4 = a.0_3 + 4;
879 foo (D.2064_4);
880
881 3) The passed value is an address of an object within another one (which is
882 also passed by reference). Such situations are described by an ancestor
883 jump function, for example:
884
885 B::foo() (struct B * const this)
886 {
887 struct A * D.1845;
888
889 D.1845_2 = &this_1(D)->D.1748;
890 A::bar (D.1845_2);
891
892 INFO is the structure describing the individual parameters, accessed in
893 different stages of IPA optimizations. PARMS_AINFO contains the information
894 that is only needed for intraprocedural analysis. */
895
896 static void
897 compute_complex_assign_jump_func (struct ipa_node_params *info,
898 struct param_analysis_info *parms_ainfo,
899 struct ipa_jump_func *jfunc,
900 gimple call, gimple stmt, tree name)
901 {
902 HOST_WIDE_INT offset, size, max_size;
903 tree op1, tc_ssa, base, ssa;
904 int index;
905
906 op1 = gimple_assign_rhs1 (stmt);
907
908 if (TREE_CODE (op1) == SSA_NAME)
909 {
910 if (SSA_NAME_IS_DEFAULT_DEF (op1))
911 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
912 else
913 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
914 SSA_NAME_DEF_STMT (op1));
915 tc_ssa = op1;
916 }
917 else
918 {
919 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
920 tc_ssa = gimple_assign_lhs (stmt);
921 }
922
923 if (index >= 0)
924 {
925 tree op2 = gimple_assign_rhs2 (stmt);
926
927 if (op2)
928 {
929 if (!is_gimple_ip_invariant (op2)
930 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
931 && !useless_type_conversion_p (TREE_TYPE (name),
932 TREE_TYPE (op1))))
933 return;
934
935 ipa_set_jf_arith_pass_through (jfunc, index, op2,
936 gimple_assign_rhs_code (stmt));
937 }
938 else if (gimple_assign_single_p (stmt)
939 && !detect_type_change_ssa (tc_ssa, call, jfunc))
940 {
941 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
942 call, tc_ssa);
943 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
944 }
945 return;
946 }
947
948 if (TREE_CODE (op1) != ADDR_EXPR)
949 return;
950 op1 = TREE_OPERAND (op1, 0);
951 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
952 return;
953 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
954 if (TREE_CODE (base) != MEM_REF
955 /* If this is a varying address, punt. */
956 || max_size == -1
957 || max_size != size)
958 return;
959 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
960 ssa = TREE_OPERAND (base, 0);
961 if (TREE_CODE (ssa) != SSA_NAME
962 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
963 || offset < 0)
964 return;
965
966 /* Dynamic types are changed only in constructors and destructors. */
967 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
968 if (index >= 0
969 && !detect_type_change (op1, base, call, jfunc, offset))
970 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
971 parm_ref_data_pass_through_p (&parms_ainfo[index],
972 call, ssa));
973 }
974
975 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
976 it looks like:
977
978 iftmp.1_3 = &obj_2(D)->D.1762;
979
980 The base of the MEM_REF must be a default definition SSA NAME of a
981 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
982 whole MEM_REF expression is returned and the offset calculated from any
983 handled components and the MEM_REF itself is stored into *OFFSET. The whole
984 RHS, with the ADDR_EXPR stripped off, is stored into *OBJ_P. */
985
986 static tree
987 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
988 {
989 HOST_WIDE_INT size, max_size;
990 tree expr, parm, obj;
991
992 if (!gimple_assign_single_p (assign))
993 return NULL_TREE;
994 expr = gimple_assign_rhs1 (assign);
995
996 if (TREE_CODE (expr) != ADDR_EXPR)
997 return NULL_TREE;
998 expr = TREE_OPERAND (expr, 0);
999 obj = expr;
1000 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1001
1002 if (TREE_CODE (expr) != MEM_REF
1003 /* If this is a varying address, punt. */
1004 || max_size == -1
1005 || max_size != size
1006 || *offset < 0)
1007 return NULL_TREE;
1008 parm = TREE_OPERAND (expr, 0);
1009 if (TREE_CODE (parm) != SSA_NAME
1010 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1011 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1012 return NULL_TREE;
1013
1014 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1015 *obj_p = obj;
1016 return expr;
1017 }
1018
1019
1020 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1021 statement PHI, try to find out whether NAME is in fact a
1022 multiple-inheritance typecast from a descendant into an ancestor of a formal
1023 parameter and thus can be described by an ancestor jump function and if so,
1024 write the appropriate function into JFUNC.
1025
1026 Essentially we want to match the following pattern:
1027
1028 if (obj_2(D) != 0B)
1029 goto <bb 3>;
1030 else
1031 goto <bb 4>;
1032
1033 <bb 3>:
1034 iftmp.1_3 = &obj_2(D)->D.1762;
1035
1036 <bb 4>:
1037 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1038 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1039 return D.1879_6; */
1040
1041 static void
1042 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1043 struct param_analysis_info *parms_ainfo,
1044 struct ipa_jump_func *jfunc,
1045 gimple call, gimple phi)
1046 {
1047 HOST_WIDE_INT offset;
1048 gimple assign, cond;
1049 basic_block phi_bb, assign_bb, cond_bb;
1050 tree tmp, parm, expr, obj;
1051 int index, i;
1052
1053 if (gimple_phi_num_args (phi) != 2)
1054 return;
1055
1056 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1057 tmp = PHI_ARG_DEF (phi, 0);
1058 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1059 tmp = PHI_ARG_DEF (phi, 1);
1060 else
1061 return;
1062 if (TREE_CODE (tmp) != SSA_NAME
1063 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1064 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1065 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1066 return;
1067
1068 assign = SSA_NAME_DEF_STMT (tmp);
1069 assign_bb = gimple_bb (assign);
1070 if (!single_pred_p (assign_bb))
1071 return;
1072 expr = get_ancestor_addr_info (assign, &obj, &offset);
1073 if (!expr)
1074 return;
1075 parm = TREE_OPERAND (expr, 0);
1076 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1077 gcc_assert (index >= 0);
1078
1079 cond_bb = single_pred (assign_bb);
1080 cond = last_stmt (cond_bb);
1081 if (!cond
1082 || gimple_code (cond) != GIMPLE_COND
1083 || gimple_cond_code (cond) != NE_EXPR
1084 || gimple_cond_lhs (cond) != parm
1085 || !integer_zerop (gimple_cond_rhs (cond)))
1086 return;
1087
1088 phi_bb = gimple_bb (phi);
1089 for (i = 0; i < 2; i++)
1090 {
1091 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1092 if (pred != assign_bb && pred != cond_bb)
1093 return;
1094 }
1095
1096 if (!detect_type_change (obj, expr, call, jfunc, offset))
1097 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1098 parm_ref_data_pass_through_p (&parms_ainfo[index],
1099 call, parm));
1100 }
1101
1102 /* Given OP which is passed as an actual argument to a called function,
1103 determine if it is possible to construct a KNOWN_TYPE jump function for it
1104 and if so, create one and store it to JFUNC. */
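/* An assumed example: for an automatic variable b of type B with a field
   a of type A, the call

     foo (&b.a);

   yields a KNOWN_TYPE jump function recording base type B, the offset of
   a within B and component type A.  */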
1105
1106 static void
1107 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1108 gimple call)
1109 {
1110 HOST_WIDE_INT offset, size, max_size;
1111 tree base;
1112
1113 if (!flag_devirtualize
1114 || TREE_CODE (op) != ADDR_EXPR
1115 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1116 return;
1117
1118 op = TREE_OPERAND (op, 0);
1119 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1120 if (!DECL_P (base)
1121 || max_size == -1
1122 || max_size != size
1123 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1124 || is_global_var (base))
1125 return;
1126
1127 if (!TYPE_BINFO (TREE_TYPE (base))
1128 || detect_type_change (op, base, call, jfunc, offset))
1129 return;
1130
1131 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1132 }
1133
1134 /* Inspect the given TYPE and return true iff it has the same structure (the
1135 same number of fields of the same types) as a C++ member pointer. If
1136 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1137 corresponding fields there. */
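/* The expected shape is the usual C++ representation of a pointer to
   member function (an assumed sketch):

     struct
     {
       void (T::*__pfn) ();   <- pointer to the method or vtable offset
       ptrdiff_t __delta;     <- adjustment of the this pointer
     };  */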
1138
1139 static bool
1140 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1141 {
1142 tree fld;
1143
1144 if (TREE_CODE (type) != RECORD_TYPE)
1145 return false;
1146
1147 fld = TYPE_FIELDS (type);
1148 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1149 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1150 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1151 return false;
1152
1153 if (method_ptr)
1154 *method_ptr = fld;
1155
1156 fld = DECL_CHAIN (fld);
1157 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1158 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1159 return false;
1160 if (delta)
1161 *delta = fld;
1162
1163 if (DECL_CHAIN (fld))
1164 return false;
1165
1166 return true;
1167 }
1168
1169 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1170 return the rhs of its defining statement. Otherwise return RHS as it
1171 is. */
1172
1173 static inline tree
1174 get_ssa_def_if_simple_copy (tree rhs)
1175 {
1176 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1177 {
1178 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1179
1180 if (gimple_assign_single_p (def_stmt))
1181 rhs = gimple_assign_rhs1 (def_stmt);
1182 else
1183 break;
1184 }
1185 return rhs;
1186 }
1187
1188 /* Simple linked list, describing known contents of an aggregate before a
1189 call. */
1190
1191 struct ipa_known_agg_contents_list
1192 {
1193 /* Offset and size of the described part of the aggregate. */
1194 HOST_WIDE_INT offset, size;
1195 /* Known constant value or NULL if the contents are known to be unknown. */
1196 tree constant;
1197 /* Pointer to the next structure in the list. */
1198 struct ipa_known_agg_contents_list *next;
1199 };
1200
1201 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1202 in ARG is filled in with constant values. ARG can either be an aggregate
1203 expression or a pointer to an aggregate. JFUNC is the jump function into
1204 which the constants are subsequently stored. */
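/* For instance (an assumed example), before the call in

     struct S s;
     s.a = 1;
     s.b = 2;
     bar (&s);

   the backward traversal finds the two constant stores and the jump
   function records values 1 and 2 at the offsets of a and b.  */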
1205
1206 static void
1207 determine_known_aggregate_parts (gimple call, tree arg,
1208 struct ipa_jump_func *jfunc)
1209 {
1210 struct ipa_known_agg_contents_list *list = NULL;
1211 int item_count = 0, const_count = 0;
1212 HOST_WIDE_INT arg_offset, arg_size;
1213 gimple_stmt_iterator gsi;
1214 tree arg_base;
1215 bool check_ref, by_ref;
1216 ao_ref r;
1217
1218 /* The function operates in three stages. First, we prepare check_ref, r,
1219 arg_base and arg_offset based on what is actually passed as an actual
1220 argument. */
1221
1222 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1223 {
1224 by_ref = true;
1225 if (TREE_CODE (arg) == SSA_NAME)
1226 {
1227 tree type_size;
1228 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1229 return;
1230 check_ref = true;
1231 arg_base = arg;
1232 arg_offset = 0;
1233 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1234 arg_size = tree_low_cst (type_size, 1);
1235 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1236 }
1237 else if (TREE_CODE (arg) == ADDR_EXPR)
1238 {
1239 HOST_WIDE_INT arg_max_size;
1240
1241 arg = TREE_OPERAND (arg, 0);
1242 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1243 &arg_max_size);
1244 if (arg_max_size == -1
1245 || arg_max_size != arg_size
1246 || arg_offset < 0)
1247 return;
1248 if (DECL_P (arg_base))
1249 {
1250 tree size;
1251 check_ref = false;
1252 size = build_int_cst (integer_type_node, arg_size);
1253 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1254 }
1255 else
1256 return;
1257 }
1258 else
1259 return;
1260 }
1261 else
1262 {
1263 HOST_WIDE_INT arg_max_size;
1264
1265 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1266
1267 by_ref = false;
1268 check_ref = false;
1269 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1270 &arg_max_size);
1271 if (arg_max_size == -1
1272 || arg_max_size != arg_size
1273 || arg_offset < 0)
1274 return;
1275
1276 ao_ref_init (&r, arg);
1277 }
1278
1279 /* Second stage walks back the BB, looks at individual statements and as long
1280 as it is confident of how the statements affect contents of the
1281 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1282 structures describing it. */
1283 gsi = gsi_for_stmt (call);
1284 gsi_prev (&gsi);
1285 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1286 {
1287 struct ipa_known_agg_contents_list *n, **p;
1288 gimple stmt = gsi_stmt (gsi);
1289 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1290 tree lhs, rhs, lhs_base;
1291 bool partial_overlap;
1292
1293 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1294 continue;
1295 if (!gimple_assign_single_p (stmt))
1296 break;
1297
1298 lhs = gimple_assign_lhs (stmt);
1299 rhs = gimple_assign_rhs1 (stmt);
1300 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
1301 break;
1302
1303 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1304 &lhs_max_size);
1305 if (lhs_max_size == -1
1306 || lhs_max_size != lhs_size
1307 || (lhs_offset < arg_offset
1308 && lhs_offset + lhs_size > arg_offset)
1309 || (lhs_offset < arg_offset + arg_size
1310 && lhs_offset + lhs_size > arg_offset + arg_size))
1311 break;
1312
1313 if (check_ref)
1314 {
1315 if (TREE_CODE (lhs_base) != MEM_REF
1316 || TREE_OPERAND (lhs_base, 0) != arg_base
1317 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1318 break;
1319 }
1320 else if (lhs_base != arg_base)
1321 break;
1322
1323 if (lhs_offset + lhs_size < arg_offset
1324 || lhs_offset >= (arg_offset + arg_size))
1325 continue;
1326
1327 partial_overlap = false;
1328 p = &list;
1329 while (*p && (*p)->offset < lhs_offset)
1330 {
1331 if ((*p)->offset + (*p)->size > lhs_offset)
1332 {
1333 partial_overlap = true;
1334 break;
1335 }
1336 p = &(*p)->next;
1337 }
1338 if (partial_overlap)
1339 break;
1340 if (*p && (*p)->offset < lhs_offset + lhs_size)
1341 {
1342 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1343 /* We already know this value is subsequently overwritten with
1344 something else. */
1345 continue;
1346 else
1347 /* Otherwise this is a partial overlap which we cannot
1348 represent. */
1349 break;
1350 }
1351
1352 rhs = get_ssa_def_if_simple_copy (rhs);
1353 n = XALLOCA (struct ipa_known_agg_contents_list);
1354 n->size = lhs_size;
1355 n->offset = lhs_offset;
1356 if (is_gimple_ip_invariant (rhs))
1357 {
1358 n->constant = rhs;
1359 const_count++;
1360 }
1361 else
1362 n->constant = NULL_TREE;
1363 n->next = *p;
1364 *p = n;
1365
1366 item_count++;
1367 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1368 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1369 break;
1370 }
1371
1372 /* Third stage just goes over the list and creates an appropriate vector of
1373 ipa_agg_jf_item structures out of it, of course only if there are
1374 any known constants to begin with. */
1375
1376 if (const_count)
1377 {
1378 jfunc->agg.by_ref = by_ref;
1379 jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
1380 while (list)
1381 {
1382 if (list->constant)
1383 {
1384 struct ipa_agg_jf_item item;
1385 item.offset = list->offset - arg_offset;
1386 item.value = prune_expression_for_jf (list->constant);
1387 VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
1388 }
1389 list = list->next;
1390 }
1391 }
1392 }
1393
1394 /* Compute jump functions for all arguments of callsite CS and insert the
1395 information in the jump_functions array in the ipa_edge_args corresponding
1396 to this callsite. */
1397
1398 static void
1399 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1400 struct cgraph_edge *cs)
1401 {
1402 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1403 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1404 gimple call = cs->call_stmt;
1405 int n, arg_num = gimple_call_num_args (call);
1406
1407 if (arg_num == 0 || args->jump_functions)
1408 return;
1409 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1410
1411 for (n = 0; n < arg_num; n++)
1412 {
1413 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1414 tree arg = gimple_call_arg (call, n);
1415
1416 if (is_gimple_ip_invariant (arg))
1417 ipa_set_jf_constant (jfunc, arg);
1418 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1419 && TREE_CODE (arg) == PARM_DECL)
1420 {
1421 int index = ipa_get_param_decl_index (info, arg);
1422
1423 gcc_assert (index >= 0);
1424 /* Aggregate passed by value, check for pass-through, otherwise we
1425 will attempt to fill in aggregate contents later in this
1426 for loop. */
1427 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1428 {
1429 ipa_set_jf_simple_pass_through (jfunc, index, false);
1430 continue;
1431 }
1432 }
1433 else if (TREE_CODE (arg) == SSA_NAME)
1434 {
1435 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1436 {
1437 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1438 if (index >= 0
1439 && !detect_type_change_ssa (arg, call, jfunc))
1440 {
1441 bool agg_p;
1442 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1443 call, arg);
1444 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1445 }
1446 }
1447 else
1448 {
1449 gimple stmt = SSA_NAME_DEF_STMT (arg);
1450 if (is_gimple_assign (stmt))
1451 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1452 call, stmt, arg);
1453 else if (gimple_code (stmt) == GIMPLE_PHI)
1454 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1455 call, stmt);
1456 }
1457 }
1458 else
1459 compute_known_type_jump_func (arg, jfunc, call);
1460
1461 if ((jfunc->type != IPA_JF_PASS_THROUGH
1462 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1463 && (jfunc->type != IPA_JF_ANCESTOR
1464 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1465 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1466 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1467 determine_known_aggregate_parts (call, arg, jfunc);
1468 }
1469 }
1470
1471 /* Compute jump functions for all edges - both direct and indirect - outgoing
1472 from NODE. Also count the actual arguments in the process. */
1473
1474 static void
1475 ipa_compute_jump_functions (struct cgraph_node *node,
1476 struct param_analysis_info *parms_ainfo)
1477 {
1478 struct cgraph_edge *cs;
1479
1480 for (cs = node->callees; cs; cs = cs->next_callee)
1481 {
1482 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1483 NULL);
1484 /* We do not need to bother analyzing calls to unknown
1485 functions unless they may become known during lto/whopr. */
1486 if (!callee->analyzed && !flag_lto)
1487 continue;
1488 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1489 }
1490
1491 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1492 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1493 }
1494
1495 /* If STMT looks like a statement loading a value from a member pointer formal
1496 parameter, return that parameter and store the offset of the field to
1497 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1498 might be clobbered). If USE_DELTA, then we look for a use of the delta
1499 field rather than the pfn. */
1500
1501 static tree
1502 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1503 HOST_WIDE_INT *offset_p)
1504 {
1505 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1506
1507 if (!gimple_assign_single_p (stmt))
1508 return NULL_TREE;
1509
1510 rhs = gimple_assign_rhs1 (stmt);
1511 if (TREE_CODE (rhs) == COMPONENT_REF)
1512 {
1513 ref_field = TREE_OPERAND (rhs, 1);
1514 rhs = TREE_OPERAND (rhs, 0);
1515 }
1516 else
1517 ref_field = NULL_TREE;
1518 if (TREE_CODE (rhs) != MEM_REF)
1519 return NULL_TREE;
1520 rec = TREE_OPERAND (rhs, 0);
1521 if (TREE_CODE (rec) != ADDR_EXPR)
1522 return NULL_TREE;
1523 rec = TREE_OPERAND (rec, 0);
1524 if (TREE_CODE (rec) != PARM_DECL
1525 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1526 return NULL_TREE;
1527 ref_offset = TREE_OPERAND (rhs, 1);
1528
1529 if (use_delta)
1530 fld = delta_field;
1531 else
1532 fld = ptr_field;
1533 if (offset_p)
1534 *offset_p = int_bit_position (fld);
1535
1536 if (ref_field)
1537 {
1538 if (integer_nonzerop (ref_offset))
1539 return NULL_TREE;
1540 return ref_field == fld ? rec : NULL_TREE;
1541 }
1542 else
1543 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1544 : NULL_TREE;
1545 }
1546
1547 /* Returns true iff T is an SSA_NAME defined by a statement. */
1548
1549 static bool
1550 ipa_is_ssa_with_stmt_def (tree t)
1551 {
1552 if (TREE_CODE (t) == SSA_NAME
1553 && !SSA_NAME_IS_DEFAULT_DEF (t))
1554 return true;
1555 else
1556 return false;
1557 }
1558
1559 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1560 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1561 indirect call graph edge. */
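/* E.g. (assumed) in

     baz (void (*cb) (void))
     {
       cb_1(D) ();
     }

   the edge for the indirect call is marked with param_index 0.  */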
1562
1563 static struct cgraph_edge *
1564 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1565 {
1566 struct cgraph_edge *cs;
1567
1568 cs = cgraph_edge (node, stmt);
1569 cs->indirect_info->param_index = param_index;
1570 cs->indirect_info->offset = 0;
1571 cs->indirect_info->polymorphic = 0;
1572 cs->indirect_info->agg_contents = 0;
1573 return cs;
1574 }
1575
1576 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1577 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1578 intermediate information about each formal parameter. Currently it checks
1579 whether the call calls a pointer that is a formal parameter and if so, the
1580 parameter is marked with the called flag and an indirect call graph edge
1581 describing the call is created. This is very simple for ordinary pointers
1582 represented in SSA but not-so-nice when it comes to member pointers. The
1583 ugly part of this function does nothing more than trying to match the
1584 pattern of such a call. An example of such a pattern is the gimple dump
1585 below; the call is on the last line:
1586
1587 <bb 2>:
1588 f$__delta_5 = f.__delta;
1589 f$__pfn_24 = f.__pfn;
1590
1591 or
1592 <bb 2>:
1593 f$__delta_5 = MEM[(struct *)&f];
1594 f$__pfn_24 = MEM[(struct *)&f + 4B];
1595
1596 and a few lines below:
1597
1598 <bb 5>
1599 D.2496_3 = (int) f$__pfn_24;
1600 D.2497_4 = D.2496_3 & 1;
1601 if (D.2497_4 != 0)
1602 goto <bb 3>;
1603 else
1604 goto <bb 4>;
1605
1606 <bb 6>:
1607 D.2500_7 = (unsigned int) f$__delta_5;
1608 D.2501_8 = &S + D.2500_7;
1609 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1610 D.2503_10 = *D.2502_9;
1611 D.2504_12 = f$__pfn_24 + -1;
1612 D.2505_13 = (unsigned int) D.2504_12;
1613 D.2506_14 = D.2503_10 + D.2505_13;
1614 D.2507_15 = *D.2506_14;
1615 iftmp.11_16 = (String:: *) D.2507_15;
1616
1617 <bb 7>:
1618 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1619 D.2500_19 = (unsigned int) f$__delta_5;
1620 D.2508_20 = &S + D.2500_19;
1621 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1622
1623 Such patterns are results of simple calls to a member pointer:
1624
1625 int doprinting (int (MyString::* f)(int) const)
1626 {
1627 MyString S ("somestring");
1628
1629 return (S.*f)(4);
1630 }
1631
1632 Moreover, the function also looks for called pointers loaded from aggregates
1633 passed by value or reference. */
1634
1635 static void
1636 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1637 struct ipa_node_params *info,
1638 struct param_analysis_info *parms_ainfo,
1639 gimple call, tree target)
1640 {
1641 gimple def;
1642 tree n1, n2;
1643 gimple d1, d2;
1644 tree rec, rec2, cond;
1645 gimple branch;
1646 int index;
1647 basic_block bb, virt_bb, join;
1648 HOST_WIDE_INT offset;
1649 bool by_ref;
1650
1651 if (SSA_NAME_IS_DEFAULT_DEF (target))
1652 {
1653 tree var = SSA_NAME_VAR (target);
1654 index = ipa_get_param_decl_index (info, var);
1655 if (index >= 0)
1656 ipa_note_param_call (node, index, call);
1657 return;
1658 }
1659
1660 def = SSA_NAME_DEF_STMT (target);
1661 if (gimple_assign_single_p (def)
1662 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1663 gimple_assign_rhs1 (def), &index, &offset,
1664 &by_ref))
1665 {
1666 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1667 cs->indirect_info->offset = offset;
1668 cs->indirect_info->agg_contents = 1;
1669 cs->indirect_info->by_ref = by_ref;
1670 return;
1671 }
1672
1673 /* Now we need to try to match the complex pattern of calling a member
1674 pointer. */
1675 if (gimple_code (def) != GIMPLE_PHI
1676 || gimple_phi_num_args (def) != 2
1677 || !POINTER_TYPE_P (TREE_TYPE (target))
1678 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1679 return;
1680
1681 /* First, we need to check whether one of these is a load from a member
1682 pointer that is a parameter to this function. */
1683 n1 = PHI_ARG_DEF (def, 0);
1684 n2 = PHI_ARG_DEF (def, 1);
1685 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1686 return;
1687 d1 = SSA_NAME_DEF_STMT (n1);
1688 d2 = SSA_NAME_DEF_STMT (n2);
1689
1690 join = gimple_bb (def);
1691 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1692 {
1693 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1694 return;
1695
1696 bb = EDGE_PRED (join, 0)->src;
1697 virt_bb = gimple_bb (d2);
1698 }
1699 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1700 {
1701 bb = EDGE_PRED (join, 1)->src;
1702 virt_bb = gimple_bb (d1);
1703 }
1704 else
1705 return;
1706
1707 /* Second, we need to check that the basic blocks are laid out in the way
1708 corresponding to the pattern. */
1709
1710 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1711 || single_pred (virt_bb) != bb
1712 || single_succ (virt_bb) != join)
1713 return;
1714
1715 /* Third, let's see that the branching is done depending on the least
1716 significant bit of the pfn. */
1717
1718 branch = last_stmt (bb);
1719 if (!branch || gimple_code (branch) != GIMPLE_COND)
1720 return;
1721
1722 if ((gimple_cond_code (branch) != NE_EXPR
1723 && gimple_cond_code (branch) != EQ_EXPR)
1724 || !integer_zerop (gimple_cond_rhs (branch)))
1725 return;
1726
1727 cond = gimple_cond_lhs (branch);
1728 if (!ipa_is_ssa_with_stmt_def (cond))
1729 return;
1730
1731 def = SSA_NAME_DEF_STMT (cond);
1732 if (!is_gimple_assign (def)
1733 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1734 || !integer_onep (gimple_assign_rhs2 (def)))
1735 return;
1736
1737 cond = gimple_assign_rhs1 (def);
1738 if (!ipa_is_ssa_with_stmt_def (cond))
1739 return;
1740
1741 def = SSA_NAME_DEF_STMT (cond);
1742
1743 if (is_gimple_assign (def)
1744 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1745 {
1746 cond = gimple_assign_rhs1 (def);
1747 if (!ipa_is_ssa_with_stmt_def (cond))
1748 return;
1749 def = SSA_NAME_DEF_STMT (cond);
1750 }
1751
1752 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1753 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1754 == ptrmemfunc_vbit_in_delta),
1755 NULL);
1756 if (rec != rec2)
1757 return;
1758
1759 index = ipa_get_param_decl_index (info, rec);
1760 if (index >= 0
1761 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1762 {
1763 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1764 cs->indirect_info->offset = offset;
1765 cs->indirect_info->agg_contents = 1;
1766 }
1767
1768 return;
1769 }
1770
1771 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1772 object referenced in the expression is a formal parameter of the caller
1773 (described by INFO), create a call note for the statement. */
1774
1775 static void
1776 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1777 struct ipa_node_params *info, gimple call,
1778 tree target)
1779 {
1780 struct cgraph_edge *cs;
1781 struct cgraph_indirect_call_info *ii;
1782 struct ipa_jump_func jfunc;
1783 tree obj = OBJ_TYPE_REF_OBJECT (target);
1784 int index;
1785 HOST_WIDE_INT anc_offset;
1786
1787 if (!flag_devirtualize)
1788 return;
1789
1790 if (TREE_CODE (obj) != SSA_NAME)
1791 return;
1792
1793 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1794 {
1795 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1796 return;
1797
1798 anc_offset = 0;
1799 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1800 gcc_assert (index >= 0);
1801 if (detect_type_change_ssa (obj, call, &jfunc))
1802 return;
1803 }
1804 else
1805 {
1806 gimple stmt = SSA_NAME_DEF_STMT (obj);
1807 tree expr;
1808
1809 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1810 if (!expr)
1811 return;
1812 index = ipa_get_param_decl_index (info,
1813 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1814 gcc_assert (index >= 0);
1815 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1816 return;
1817 }
1818
1819 cs = ipa_note_param_call (node, index, call);
1820 ii = cs->indirect_info;
1821 ii->offset = anc_offset;
1822 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1823 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1824 ii->polymorphic = 1;
1825 }
1826
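/* For illustration, the simplest shape handled below is a virtual call made
   directly through a formal parameter, as in this hypothetical snippet:

     struct A { virtual int foo (); };

     int caller (A *a)
     {
       return a->foo ();
     }

   Here OBJ_TYPE_REF_OBJECT is the default definition of the pointer
   parameter, so the ancestor offset is zero.  A call through a base
   subobject of a larger object instead goes through get_ancestor_addr_info
   first, which yields a nonzero ANC_OFFSET.  */
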
1827 /* Analyze a call statement CALL to see whether and how it uses the formal
1828 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to
1829 a vector containing intermediate information about each formal parameter. */
1830
1831 static void
1832 ipa_analyze_call_uses (struct cgraph_node *node,
1833 struct ipa_node_params *info,
1834 struct param_analysis_info *parms_ainfo, gimple call)
1835 {
1836 tree target = gimple_call_fn (call);
1837
1838 if (!target)
1839 return;
1840 if (TREE_CODE (target) == SSA_NAME)
1841 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1842 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1843 ipa_analyze_virtual_call_uses (node, info, call, target);
1844 }
1845
1846
1847 /* Analyze the call statement STMT with respect to the formal parameters
1848 (described in INFO) of the caller given by NODE. Currently it only checks
1849 whether formal parameters are used as call targets. PARMS_AINFO is a pointer
1850 to a vector containing intermediate information about each formal parameter. */
1851
1852 static void
1853 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1854 struct param_analysis_info *parms_ainfo, gimple stmt)
1855 {
1856 if (is_gimple_call (stmt))
1857 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1858 }
1859
1860 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1861 address-taken operands alike. If OP is a parameter declaration, mark it as
1862 used in the info structure passed in DATA. */
1863
1864 static bool
1865 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1866 tree op, void *data)
1867 {
1868 struct ipa_node_params *info = (struct ipa_node_params *) data;
1869
1870 op = get_base_address (op);
1871 if (op
1872 && TREE_CODE (op) == PARM_DECL)
1873 {
1874 int index = ipa_get_param_decl_index (info, op);
1875 gcc_assert (index >= 0);
1876 ipa_set_param_used (info, index, true);
1877 }
1878
1879 return false;
1880 }
1881
1882 /* Scan the function body of NODE and inspect the uses of formal parameters.
1883 Store the findings in various structures of the associated ipa_node_params
1884 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1885 vector containing intermediate information about each formal parameter. */
1886
1887 static void
1888 ipa_analyze_params_uses (struct cgraph_node *node,
1889 struct param_analysis_info *parms_ainfo)
1890 {
1891 tree decl = node->symbol.decl;
1892 basic_block bb;
1893 struct function *func;
1894 gimple_stmt_iterator gsi;
1895 struct ipa_node_params *info = IPA_NODE_REF (node);
1896 int i;
1897
1898 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1899 return;
1900
1901 for (i = 0; i < ipa_get_param_count (info); i++)
1902 {
1903 tree parm = ipa_get_param (info, i);
1904 tree ddef;
1905 /* For SSA regs see if parameter is used. For non-SSA we compute
1906 the flag during modification analysis. */
1907 if (is_gimple_reg (parm)
1908 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1909 parm)) != NULL_TREE
1910 && !has_zero_uses (ddef))
1911 ipa_set_param_used (info, i, true);
1912 }
1913
1914 func = DECL_STRUCT_FUNCTION (decl);
1915 FOR_EACH_BB_FN (bb, func)
1916 {
1917 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1918 {
1919 gimple stmt = gsi_stmt (gsi);
1920
1921 if (is_gimple_debug (stmt))
1922 continue;
1923
1924 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1925 walk_stmt_load_store_addr_ops (stmt, info,
1926 visit_ref_for_mod_analysis,
1927 visit_ref_for_mod_analysis,
1928 visit_ref_for_mod_analysis);
1929 }
1930 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1931 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1932 visit_ref_for_mod_analysis,
1933 visit_ref_for_mod_analysis,
1934 visit_ref_for_mod_analysis);
1935 }
1936
1937 info->uses_analysis_done = 1;
1938 }
1939
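/* As a small example of the use analysis above, consider a hypothetical
   function

     int f (int a, int b) { return a; }

   The default SSA definition of A has uses, so A is marked as used, while B
   is not, which later lets interprocedural passes disregard or remove it.  */
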
1940 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
1941
1942 static void
1943 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
1944 {
1945 int i;
1946
1947 for (i = 0; i < param_count; i++)
1948 {
1949 if (parms_ainfo[i].parm_visited_statements)
1950 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1951 if (parms_ainfo[i].pt_visited_statements)
1952 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1953 }
1954 }
1955
1956 /* Initialize the array describing properties of formal parameters
1957 of NODE, analyze their uses and compute jump functions associated
1958 with actual arguments of calls from within NODE. */
1959
1960 void
1961 ipa_analyze_node (struct cgraph_node *node)
1962 {
1963 struct ipa_node_params *info;
1964 struct param_analysis_info *parms_ainfo;
1965 int param_count;
1966
1967 ipa_check_create_node_params ();
1968 ipa_check_create_edge_args ();
1969 info = IPA_NODE_REF (node);
1970 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1971 ipa_initialize_node_params (node);
1972
1973 param_count = ipa_get_param_count (info);
1974 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1975 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1976
1977 ipa_analyze_params_uses (node, parms_ainfo);
1978 ipa_compute_jump_functions (node, parms_ainfo);
1979
1980 free_parms_ainfo (parms_ainfo, param_count);
1981 pop_cfun ();
1982 }
1983
1984
1985 /* Update the jump function DST when the call graph edge corresponding to SRC
1986 is being inlined, knowing that DST is of type ancestor and SRC of known
1987 type. */
1988
1989 static void
1990 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1991 struct ipa_jump_func *dst)
1992 {
1993 HOST_WIDE_INT combined_offset;
1994 tree combined_type;
1995
1996 combined_offset = ipa_get_jf_known_type_offset (src)
1997 + ipa_get_jf_ancestor_offset (dst);
1998 combined_type = ipa_get_jf_ancestor_type (dst);
1999
2000 ipa_set_jf_known_type (dst, combined_offset,
2001 ipa_get_jf_known_type_base_type (src),
2002 combined_type);
2003 }
2004
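/* A worked example of the combination above, with hypothetical types and
   offsets: if SRC says the argument is a known object of type D starting at
   offset 32 bits, and DST is an ancestor jump function adding another 64
   bits to reach a base B, the result is a known-type jump function with
   base type D, offset 32 + 64 = 96 bits and component type B.  */
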
2005 /* Update the jump functions associated with call graph edge E when the call
2006 graph edge CS is being inlined, assuming that E->caller is already (possibly
2007 indirectly) inlined into CS->callee and that E has not been inlined. */
2008
2009 static void
2010 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2011 struct cgraph_edge *e)
2012 {
2013 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2014 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2015 int count = ipa_get_cs_argument_count (args);
2016 int i;
2017
2018 for (i = 0; i < count; i++)
2019 {
2020 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2021
2022 if (dst->type == IPA_JF_ANCESTOR)
2023 {
2024 struct ipa_jump_func *src;
2025 int dst_fid = dst->value.ancestor.formal_id;
2026
2027 /* A variable number of arguments can cause havoc if we try to access
2028 an argument that does not exist on the inlined edge, so make sure
2029 we don't. */
2030 if (dst_fid >= ipa_get_cs_argument_count (top))
2031 {
2032 dst->type = IPA_JF_UNKNOWN;
2033 continue;
2034 }
2035
2036 src = ipa_get_ith_jump_func (top, dst_fid);
2037
2038 if (src->agg.items
2039 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2040 {
2041 struct ipa_agg_jf_item *item;
2042 int j;
2043
2044 /* Currently we do not produce clobber aggregate jump functions,
2045 replace with merging when we do. */
2046 gcc_assert (!dst->agg.items);
2047
2048 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2049 dst->agg.by_ref = src->agg.by_ref;
2050 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2051 item->offset -= dst->value.ancestor.offset;
2052 }
2053
2054 if (src->type == IPA_JF_KNOWN_TYPE)
2055 combine_known_type_and_ancestor_jfs (src, dst);
2056 else if (src->type == IPA_JF_PASS_THROUGH
2057 && src->value.pass_through.operation == NOP_EXPR)
2058 {
2059 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2060 dst->value.ancestor.agg_preserved &=
2061 src->value.pass_through.agg_preserved;
2062 }
2063 else if (src->type == IPA_JF_ANCESTOR)
2064 {
2065 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2066 dst->value.ancestor.offset += src->value.ancestor.offset;
2067 dst->value.ancestor.agg_preserved &=
2068 src->value.ancestor.agg_preserved;
2069 }
2070 else
2071 dst->type = IPA_JF_UNKNOWN;
2072 }
2073 else if (dst->type == IPA_JF_PASS_THROUGH)
2074 {
2075 struct ipa_jump_func *src;
2076 /* We must check the range because of calls with a variable number of
2077 arguments, and we cannot combine jump functions with operations. */
2078 if (dst->value.pass_through.operation == NOP_EXPR
2079 && (dst->value.pass_through.formal_id
2080 < ipa_get_cs_argument_count (top)))
2081 {
2082 bool agg_p;
2083 int dst_fid = dst->value.pass_through.formal_id;
2084 src = ipa_get_ith_jump_func (top, dst_fid);
2085 agg_p = dst->value.pass_through.agg_preserved;
2086
2087 dst->type = src->type;
2088 dst->value = src->value;
2089
2090 if (src->agg.items
2091 && (agg_p || !src->agg.by_ref))
2092 {
2093 /* Currently we do not produce clobber aggregate jump
2094 functions, replace with merging when we do. */
2095 gcc_assert (!dst->agg.items);
2096
2097 dst->agg.by_ref = src->agg.by_ref;
2098 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2099 src->agg.items);
2100 }
2101
2102 if (!agg_p)
2103 {
2104 if (dst->type == IPA_JF_PASS_THROUGH)
2105 dst->value.pass_through.agg_preserved = false;
2106 else if (dst->type == IPA_JF_ANCESTOR)
2107 dst->value.ancestor.agg_preserved = false;
2108 }
2109 }
2110 else
2111 dst->type = IPA_JF_UNKNOWN;
2112 }
2113 }
2114 }
2115
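/* A short example of the composition done above: suppose the jump function
   on the inlined edge CS describes an argument as a NOP_EXPR pass-through
   of the caller's parameter 2, and an edge E inside the inlined body passes
   the corresponding formal along unchanged.  After inlining, the jump
   function of E becomes a pass-through of the original caller's parameter
   2; aggregate contents are copied only when the agg_preserved flags on
   the path permit it.  */
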
2116 /* If TARGET is an addr_expr of a function declaration, make it the destination
2117 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2118
2119 struct cgraph_edge *
2120 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2121 {
2122 struct cgraph_node *callee;
2123 struct inline_edge_summary *es = inline_edge_summary (ie);
2124
2125 if (TREE_CODE (target) == ADDR_EXPR)
2126 target = TREE_OPERAND (target, 0);
2127 if (TREE_CODE (target) != FUNCTION_DECL)
2128 return NULL;
2129 callee = cgraph_get_node (target);
2130 if (!callee)
2131 return NULL;
2132 ipa_check_create_node_params ();
2133
2134 /* We cannot make edges to inline clones. It is a bug if someone removed
2135 the cgraph node too early. */
2136 gcc_assert (!callee->global.inlined_to);
2137
2138 cgraph_make_edge_direct (ie, callee);
2139 es = inline_edge_summary (ie);
2140 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2141 - eni_size_weights.call_cost);
2142 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2143 - eni_time_weights.call_cost);
2144 if (dump_file)
2145 {
2146 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2147 "(%s/%i -> %s/%i), for stmt ",
2148 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2149 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2150 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2151 if (ie->call_stmt)
2152 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2153 else
2154 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2155 }
2156 callee = cgraph_function_or_thunk_node (callee, NULL);
2157
2158 return ie;
2159 }
2160
2161 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2162 return NULL if there is none. BY_REF specifies whether the value has to
2163 be passed by reference or by value. */
2164
2165 tree
2166 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2167 HOST_WIDE_INT offset, bool by_ref)
2168 {
2169 struct ipa_agg_jf_item *item;
2170 int i;
2171
2172 if (by_ref != agg->by_ref)
2173 return NULL;
2174
2175 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2176 if (item->offset == offset)
2177 {
2178 /* Currently we do not have clobber values, return NULL for them once
2179 we do. */
2180 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2181 return item->value;
2182 }
2183 return NULL;
2184 }
2185
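/* A hypothetical use of the lookup above, with the offset measured in bits
   as the surrounding code assumes: if the aggregate jump function records
   that the caller stored &some_fn at bit offset 64 of a by-reference
   argument, then

     ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);

   returns the ADDR_EXPR of some_fn, whereas a mismatched BY_REF flag or an
   offset that no item matches yields NULL.  The name some_fn is a
   placeholder.  */
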
2186 /* Try to find a destination for indirect edge IE that corresponds to a simple
2187 call or a call of a member function pointer and where the destination is a
2188 pointer formal parameter described by jump function JFUNC. If it can be
2189 determined, return the newly direct edge, otherwise return NULL. */
2190
2191 static struct cgraph_edge *
2192 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2193 struct ipa_jump_func *jfunc)
2194 {
2195 tree target;
2196
2197 if (ie->indirect_info->agg_contents)
2198 {
2199 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2200 ie->indirect_info->offset,
2201 ie->indirect_info->by_ref);
2202 if (!target)
2203 return NULL;
2204 }
2205 else
2206 {
2207 if (jfunc->type != IPA_JF_CONST)
2208 return NULL;
2209 target = ipa_get_jf_constant (jfunc);
2210 }
2211 return ipa_make_edge_direct_to_target (ie, target);
2212 }
2213
2214 /* Try to find a destination for indirect edge IE that corresponds to a
2215 virtual call based on a formal parameter which is described by jump
2216 function JFUNC and if it can be determined, make it direct and return the
2217 direct edge. Otherwise, return NULL. */
2218
2219 static struct cgraph_edge *
2220 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2221 struct ipa_jump_func *jfunc)
2222 {
2223 tree binfo, target;
2224
2225 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2226 return NULL;
2227
2228 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2229 gcc_checking_assert (binfo);
2230 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2231 + ie->indirect_info->offset,
2232 ie->indirect_info->otr_type);
2233 if (binfo)
2234 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2235 binfo);
2236 else
2237 return NULL;
2238
2239 if (target)
2240 return ipa_make_edge_direct_to_target (ie, target);
2241 else
2242 return NULL;
2243 }
2244
2245 /* Update the param called notes associated with NODE when CS is being inlined,
2246 assuming NODE is (potentially indirectly) inlined into CS->callee.
2247 Moreover, if the callee is discovered to be constant, create a new cgraph
2248 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2249 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2250
2251 static bool
2252 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2253 struct cgraph_node *node,
2254 VEC (cgraph_edge_p, heap) **new_edges)
2255 {
2256 struct ipa_edge_args *top;
2257 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2258 bool res = false;
2259
2260 ipa_check_create_edge_args ();
2261 top = IPA_EDGE_REF (cs);
2262
2263 for (ie = node->indirect_calls; ie; ie = next_ie)
2264 {
2265 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2266 struct ipa_jump_func *jfunc;
2267 int param_index;
2268
2269 next_ie = ie->next_callee;
2270
2271 if (ici->param_index == -1)
2272 continue;
2273
2274 /* We must check the range because of calls with a variable number of arguments. */
2275 if (ici->param_index >= ipa_get_cs_argument_count (top))
2276 {
2277 ici->param_index = -1;
2278 continue;
2279 }
2280
2281 param_index = ici->param_index;
2282 jfunc = ipa_get_ith_jump_func (top, param_index);
2283 if (jfunc->type == IPA_JF_PASS_THROUGH
2284 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2285 {
2286 if (ici->agg_contents
2287 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2288 ici->param_index = -1;
2289 else
2290 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2291 }
2292 else if (jfunc->type == IPA_JF_ANCESTOR)
2293 {
2294 if (ici->agg_contents
2295 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2296 ici->param_index = -1;
2297 else
2298 {
2299 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2300 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2301 }
2302 }
2303 else
2304 /* Either we can find a destination for this edge now or never. */
2305 ici->param_index = -1;
2306
2307 if (!flag_indirect_inlining)
2308 continue;
2309
2310 if (ici->polymorphic)
2311 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2312 else
2313 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2314
2315 if (new_direct_edge)
2316 {
2317 new_direct_edge->indirect_inlining_edge = 1;
2318 if (new_direct_edge->call_stmt)
2319 new_direct_edge->call_stmt_cannot_inline_p
2320 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2321 new_direct_edge->callee->symbol.decl);
2322 if (new_edges)
2323 {
2324 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2325 new_direct_edge);
2326 top = IPA_EDGE_REF (cs);
2327 res = true;
2328 }
2329 }
2330 }
2331
2332 return res;
2333 }
2334
2335 /* Recursively traverse subtree of NODE (including node) made of inlined
2336 cgraph_edges when CS has been inlined and invoke
2337 update_indirect_edges_after_inlining on all nodes and
2338 update_jump_functions_after_inlining on all non-inlined edges that lead out
2339 of this subtree. Newly discovered indirect edges will be added to
2340 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2341 created. */
2342
2343 static bool
2344 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2345 struct cgraph_node *node,
2346 VEC (cgraph_edge_p, heap) **new_edges)
2347 {
2348 struct cgraph_edge *e;
2349 bool res;
2350
2351 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2352
2353 for (e = node->callees; e; e = e->next_callee)
2354 if (!e->inline_failed)
2355 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2356 else
2357 update_jump_functions_after_inlining (cs, e);
2358 for (e = node->indirect_calls; e; e = e->next_callee)
2359 update_jump_functions_after_inlining (cs, e);
2360
2361 return res;
2362 }
2363
2364 /* Update jump functions and call note functions on inlining the call site CS.
2365 CS is expected to lead to a node already cloned by
2366 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2367 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2368 created. */
2369
2370 bool
2371 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2372 VEC (cgraph_edge_p, heap) **new_edges)
2373 {
2374 bool changed;
2375 /* Do nothing if the preparation phase has not been carried out yet
2376 (i.e. during early inlining). */
2377 if (!ipa_node_params_vector)
2378 return false;
2379 gcc_assert (ipa_edge_args_vector);
2380
2381 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2382
2383 /* We do not keep jump functions of inlined edges up to date. Better to free
2384 them so we do not access them accidentally. */
2385 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2386 return changed;
2387 }
2388
2389 /* Frees all dynamically allocated structures that the argument info points
2390 to. */
2391
2392 void
2393 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2394 {
2395 if (args->jump_functions)
2396 ggc_free (args->jump_functions);
2397
2398 memset (args, 0, sizeof (*args));
2399 }
2400
2401 /* Free all ipa_edge_args structures. */
2402
2403 void
2404 ipa_free_all_edge_args (void)
2405 {
2406 int i;
2407 struct ipa_edge_args *args;
2408
2409 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2410 ipa_free_edge_args_substructures (args);
2411
2412 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2413 ipa_edge_args_vector = NULL;
2414 }
2415
2416 /* Frees all dynamically allocated structures that the param info points
2417 to. */
2418
2419 void
2420 ipa_free_node_params_substructures (struct ipa_node_params *info)
2421 {
2422 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2423 free (info->lattices);
2424 /* Lattice values and their sources are deallocated with their allocation
2425 pool. */
2426 VEC_free (tree, heap, info->known_vals);
2427 memset (info, 0, sizeof (*info));
2428 }
2429
2430 /* Free all ipa_node_params structures. */
2431
2432 void
2433 ipa_free_all_node_params (void)
2434 {
2435 int i;
2436 struct ipa_node_params *info;
2437
2438 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2439 ipa_free_node_params_substructures (info);
2440
2441 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2442 ipa_node_params_vector = NULL;
2443 }
2444
2445 /* Set the aggregate replacements of NODE to be AGGVALS. */
2446
2447 void
2448 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2449 struct ipa_agg_replacement_value *aggvals)
2450 {
2451 if (VEC_length (ipa_agg_replacement_value_p, ipa_node_agg_replacements)
2452 <= (unsigned) cgraph_max_uid)
2453 VEC_safe_grow_cleared (ipa_agg_replacement_value_p, gc,
2454 ipa_node_agg_replacements, cgraph_max_uid + 1);
2455
2456 VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
2457 node->uid, aggvals);
2458 }
2459
2460 /* Hook that is called by cgraph.c when an edge is removed. */
2461
2462 static void
2463 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2464 {
2465 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
2466 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2467 <= (unsigned)cs->uid)
2468 return;
2469 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2470 }
2471
2472 /* Hook that is called by cgraph.c when a node is removed. */
2473
2474 static void
2475 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2476 {
2477 /* During IPA-CP updating we can be called on not-yet-analyzed clones. */
2478 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2479 > (unsigned)node->uid)
2480 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2481 if (VEC_length (ipa_agg_replacement_value_p, ipa_node_agg_replacements)
2482 > (unsigned)node->uid)
2483 VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
2484 (unsigned)node->uid, NULL);
2485 }
2486
2487 /* Hook that is called by cgraph.c when an edge is duplicated. */
2488
2489 static void
2490 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2491 __attribute__((unused)) void *data)
2492 {
2493 struct ipa_edge_args *old_args, *new_args;
2494 unsigned int i;
2495
2496 ipa_check_create_edge_args ();
2497
2498 old_args = IPA_EDGE_REF (src);
2499 new_args = IPA_EDGE_REF (dst);
2500
2501 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2502 old_args->jump_functions);
2503
2504 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2505 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2506 = VEC_copy (ipa_agg_jf_item_t, gc,
2507 VEC_index (ipa_jump_func_t,
2508 old_args->jump_functions, i).agg.items);
2509 }
2510
2511 /* Hook that is called by cgraph.c when a node is duplicated. */
2512
2513 static void
2514 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2515 ATTRIBUTE_UNUSED void *data)
2516 {
2517 struct ipa_node_params *old_info, *new_info;
2518 struct ipa_agg_replacement_value *old_av, *new_av;
2519
2520 ipa_check_create_node_params ();
2521 old_info = IPA_NODE_REF (src);
2522 new_info = IPA_NODE_REF (dst);
2523
2524 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2525 old_info->descriptors);
2526 new_info->lattices = NULL;
2527 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2528
2529 new_info->uses_analysis_done = old_info->uses_analysis_done;
2530 new_info->node_enqueued = old_info->node_enqueued;
2531
2532 old_av = ipa_get_agg_replacements_for_node (src);
2533 if (!old_av)
2534 return;
2535
2536 new_av = NULL;
2537 while (old_av)
2538 {
2539 struct ipa_agg_replacement_value *v;
2540
2541 v = ggc_alloc_ipa_agg_replacement_value ();
2542 memcpy (v, old_av, sizeof (*v));
2543 v->next = new_av;
2544 new_av = v;
2545 old_av = old_av->next;
2546 }
2547 ipa_set_node_agg_value_chain (dst, new_av);
2548 }
2549
2550
2551 /* Analyze a function newly added to the callgraph. */
2552
2553 static void
2554 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2555 {
2556 ipa_analyze_node (node);
2557 }
2558
2559 /* Register our cgraph hooks if they are not already there. */
2560
2561 void
2562 ipa_register_cgraph_hooks (void)
2563 {
2564 if (!edge_removal_hook_holder)
2565 edge_removal_hook_holder =
2566 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2567 if (!node_removal_hook_holder)
2568 node_removal_hook_holder =
2569 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2570 if (!edge_duplication_hook_holder)
2571 edge_duplication_hook_holder =
2572 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2573 if (!node_duplication_hook_holder)
2574 node_duplication_hook_holder =
2575 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2576 function_insertion_hook_holder =
2577 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2578 }
2579
2580 /* Unregister our cgraph hooks. */
2581
2582 static void
2583 ipa_unregister_cgraph_hooks (void)
2584 {
2585 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2586 edge_removal_hook_holder = NULL;
2587 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2588 node_removal_hook_holder = NULL;
2589 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2590 edge_duplication_hook_holder = NULL;
2591 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2592 node_duplication_hook_holder = NULL;
2593 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2594 function_insertion_hook_holder = NULL;
2595 }
2596
2597 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2598 longer needed after ipa-cp. */
2599
2600 void
2601 ipa_free_all_structures_after_ipa_cp (void)
2602 {
2603 if (!optimize)
2604 {
2605 ipa_free_all_edge_args ();
2606 ipa_free_all_node_params ();
2607 free_alloc_pool (ipcp_sources_pool);
2608 free_alloc_pool (ipcp_values_pool);
2609 free_alloc_pool (ipcp_agg_lattice_pool);
2610 ipa_unregister_cgraph_hooks ();
2611 }
2612 }
2613
2614 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2615 longer needed after indirect inlining. */
2616
2617 void
2618 ipa_free_all_structures_after_iinln (void)
2619 {
2620 ipa_free_all_edge_args ();
2621 ipa_free_all_node_params ();
2622 ipa_unregister_cgraph_hooks ();
2623 if (ipcp_sources_pool)
2624 free_alloc_pool (ipcp_sources_pool);
2625 if (ipcp_values_pool)
2626 free_alloc_pool (ipcp_values_pool);
2627 if (ipcp_agg_lattice_pool)
2628 free_alloc_pool (ipcp_agg_lattice_pool);
2629 }
2630
2631 /* Print the ipa_node_params parameter descriptors of function NODE
2632 to F. */
2633
2634 void
2635 ipa_print_node_params (FILE *f, struct cgraph_node *node)
2636 {
2637 int i, count;
2638 tree temp;
2639 struct ipa_node_params *info;
2640
2641 if (!node->analyzed)
2642 return;
2643 info = IPA_NODE_REF (node);
2644 fprintf (f, " function %s parameter descriptors:\n",
2645 cgraph_node_name (node));
2646 count = ipa_get_param_count (info);
2647 for (i = 0; i < count; i++)
2648 {
2649 temp = ipa_get_param (info, i);
2650 if (TREE_CODE (temp) == PARM_DECL)
2651 fprintf (f, " param %d : %s", i,
2652 (DECL_NAME (temp)
2653 ? (*lang_hooks.decl_printable_name) (temp, 2)
2654 : "(unnamed)"));
2655 if (ipa_is_param_used (info, i))
2656 fprintf (f, " used");
2657 fprintf (f, "\n");
2658 }
2659 }
2660
2661 /* Print ipa_tree_map data structures of all functions in the
2662 callgraph to F. */
2663
2664 void
2665 ipa_print_all_params (FILE * f)
2666 {
2667 struct cgraph_node *node;
2668
2669 fprintf (f, "\nFunction parameters:\n");
2670 FOR_EACH_FUNCTION (node)
2671 ipa_print_node_params (f, node);
2672 }
2673
2674 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2675
2676 VEC(tree, heap) *
2677 ipa_get_vector_of_formal_parms (tree fndecl)
2678 {
2679 VEC(tree, heap) *args;
2680 int count;
2681 tree parm;
2682
2683 count = count_formal_params (fndecl);
2684 args = VEC_alloc (tree, heap, count);
2685 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2686 VEC_quick_push (tree, args, parm);
2687
2688 return args;
2689 }
2690
2691 /* Return a heap allocated vector containing types of formal parameters of
2692 function type FNTYPE. */
2693
2694 static inline VEC(tree, heap) *
2695 get_vector_of_formal_parm_types (tree fntype)
2696 {
2697 VEC(tree, heap) *types;
2698 int count = 0;
2699 tree t;
2700
2701 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2702 count++;
2703
2704 types = VEC_alloc (tree, heap, count);
2705 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2706 VEC_quick_push (tree, types, TREE_VALUE (t));
2707
2708 return types;
2709 }
2710
2711 /* Modify the function declaration FNDECL and its type according to the plan
2712 in ADJUSTMENTS. It also sets the base fields of individual adjustment
2713 structures to reflect the actual parameters being modified, which are
2714 determined by the base_index field. */
2715
2716 void
2717 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2718 const char *synth_parm_prefix)
2719 {
2720 VEC(tree, heap) *oparms, *otypes;
2721 tree orig_type, new_type = NULL;
2722 tree old_arg_types, t, new_arg_types = NULL;
2723 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2724 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2725 tree new_reversed = NULL;
2726 bool care_for_types, last_parm_void;
2727
2728 if (!synth_parm_prefix)
2729 synth_parm_prefix = "SYNTH";
2730
2731 oparms = ipa_get_vector_of_formal_parms (fndecl);
2732 orig_type = TREE_TYPE (fndecl);
2733 old_arg_types = TYPE_ARG_TYPES (orig_type);
2734
2735 /* The following test is an ugly hack; some functions simply don't have any
2736 arguments in their type. This is probably a bug but well... */
2737 care_for_types = (old_arg_types != NULL_TREE);
2738 if (care_for_types)
2739 {
2740 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2741 == void_type_node);
2742 otypes = get_vector_of_formal_parm_types (orig_type);
2743 if (last_parm_void)
2744 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2745 else
2746 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2747 }
2748 else
2749 {
2750 last_parm_void = false;
2751 otypes = NULL;
2752 }
2753
2754 for (i = 0; i < len; i++)
2755 {
2756 struct ipa_parm_adjustment *adj;
2757 gcc_assert (link);
2758
2759 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2760 parm = VEC_index (tree, oparms, adj->base_index);
2761 adj->base = parm;
2762
2763 if (adj->copy_param)
2764 {
2765 if (care_for_types)
2766 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2767 adj->base_index),
2768 new_arg_types);
2769 *link = parm;
2770 link = &DECL_CHAIN (parm);
2771 }
2772 else if (!adj->remove_param)
2773 {
2774 tree new_parm;
2775 tree ptype;
2776
2777 if (adj->by_ref)
2778 ptype = build_pointer_type (adj->type);
2779 else
2780 ptype = adj->type;
2781
2782 if (care_for_types)
2783 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2784
2785 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2786 ptype);
2787 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2788
2789 DECL_ARTIFICIAL (new_parm) = 1;
2790 DECL_ARG_TYPE (new_parm) = ptype;
2791 DECL_CONTEXT (new_parm) = fndecl;
2792 TREE_USED (new_parm) = 1;
2793 DECL_IGNORED_P (new_parm) = 1;
2794 layout_decl (new_parm, 0);
2795
2796 adj->base = parm;
2797 adj->reduction = new_parm;
2798
2799 *link = new_parm;
2800
2801 link = &DECL_CHAIN (new_parm);
2802 }
2803 }
2804
2805 *link = NULL_TREE;
2806
2807 if (care_for_types)
2808 {
2809 new_reversed = nreverse (new_arg_types);
2810 if (last_parm_void)
2811 {
2812 if (new_reversed)
2813 TREE_CHAIN (new_arg_types) = void_list_node;
2814 else
2815 new_reversed = void_list_node;
2816 }
2817 }
2818
2819 /* Use copy_node to preserve as much as possible from the original type
2820 (debug info, attribute lists etc.).
2821 The exception is METHOD_TYPEs, which must have a THIS argument.
2822 When we are asked to remove it, we need to build a new FUNCTION_TYPE
2823 instead. */
2824 if (TREE_CODE (orig_type) != METHOD_TYPE
2825 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2826 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2827 {
2828 new_type = build_distinct_type_copy (orig_type);
2829 TYPE_ARG_TYPES (new_type) = new_reversed;
2830 }
2831 else
2832 {
2833 new_type
2834 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2835 new_reversed));
2836 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2837 DECL_VINDEX (fndecl) = NULL_TREE;
2838 }
2839
2840 /* When signature changes, we need to clear builtin info. */
2841 if (DECL_BUILT_IN (fndecl))
2842 {
2843 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2844 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2845 }
2846
2847 /* This is a new type, not a copy of an old type. Need to reassociate
2848 variants. We can handle everything except the main variant lazily. */
2849 t = TYPE_MAIN_VARIANT (orig_type);
2850 if (orig_type != t)
2851 {
2852 TYPE_MAIN_VARIANT (new_type) = t;
2853 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2854 TYPE_NEXT_VARIANT (t) = new_type;
2855 }
2856 else
2857 {
2858 TYPE_MAIN_VARIANT (new_type) = new_type;
2859 TYPE_NEXT_VARIANT (new_type) = NULL;
2860 }
2861
2862 TREE_TYPE (fndecl) = new_type;
2863 DECL_VIRTUAL_P (fndecl) = 0;
2864 if (otypes)
2865 VEC_free (tree, heap, otypes);
2866 VEC_free (tree, heap, oparms);
2867 }
2868
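/* As a hypothetical illustration of the machinery above and of
   ipa_modify_call_arguments below, IPA-SRA may decide to rewrite

     int f (struct big *p);      (only p->x is actually read)

   into

     int f (int x_repl);         (synthesized scalar replacement)

   by recording a single adjustment with copy_param and remove_param both
   false, by_ref false, type int and the bit offset of the field x.  The
   code above then builds the replacement PARM_DECL and the new
   FUNCTION_TYPE from that description.  The names f, p and x_repl are
   placeholders.  */
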
2869 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2870 If this is a directly recursive call, CS must be NULL. Otherwise it must
2871 contain the corresponding call graph edge. */
2872
2873 void
2874 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2875 ipa_parm_adjustment_vec adjustments)
2876 {
2877 VEC(tree, heap) *vargs;
2878 VEC(tree, gc) **debug_args = NULL;
2879 gimple new_stmt;
2880 gimple_stmt_iterator gsi;
2881 tree callee_decl;
2882 int i, len;
2883
2884 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2885 vargs = VEC_alloc (tree, heap, len);
2886 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2887
2888 gsi = gsi_for_stmt (stmt);
2889 for (i = 0; i < len; i++)
2890 {
2891 struct ipa_parm_adjustment *adj;
2892
2893 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2894
2895 if (adj->copy_param)
2896 {
2897 tree arg = gimple_call_arg (stmt, adj->base_index);
2898
2899 VEC_quick_push (tree, vargs, arg);
2900 }
2901 else if (!adj->remove_param)
2902 {
2903 tree expr, base, off;
2904 location_t loc;
2905
2906 /* We create a new parameter out of the value of the old one; we can
2907 do the following kinds of transformations:
2908
2909 - A scalar passed by reference is converted to a scalar passed by
2910 value. (adj->by_ref is false and the type of the original
2911 actual argument is a pointer to a scalar).
2912
2913 - A part of an aggregate is passed instead of the whole aggregate.
2914 The part can be passed either by value or by reference, this is
2915 determined by value of adj->by_ref. Moreover, the code below
2916 handles both situations when the original aggregate is passed by
2917 value (its type is not a pointer) and when it is passed by
2918 reference (it is a pointer to an aggregate).
2919
2920 When the new argument is passed by reference (adj->by_ref is true)
2921 it must be a part of an aggregate and therefore we form it by
2922 simply taking the address of a reference inside the original
2923 aggregate. */
2924
2925 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2926 base = gimple_call_arg (stmt, adj->base_index);
2927 loc = EXPR_LOCATION (base);
2928
2929 if (TREE_CODE (base) != ADDR_EXPR
2930 && POINTER_TYPE_P (TREE_TYPE (base)))
2931 off = build_int_cst (adj->alias_ptr_type,
2932 adj->offset / BITS_PER_UNIT);
2933 else
2934 {
2935 HOST_WIDE_INT base_offset;
2936 tree prev_base;
2937
2938 if (TREE_CODE (base) == ADDR_EXPR)
2939 base = TREE_OPERAND (base, 0);
2940 prev_base = base;
2941 base = get_addr_base_and_unit_offset (base, &base_offset);
2942 /* Aggregate arguments can have non-invariant addresses. */
2943 if (!base)
2944 {
2945 base = build_fold_addr_expr (prev_base);
2946 off = build_int_cst (adj->alias_ptr_type,
2947 adj->offset / BITS_PER_UNIT);
2948 }
2949 else if (TREE_CODE (base) == MEM_REF)
2950 {
2951 off = build_int_cst (adj->alias_ptr_type,
2952 base_offset
2953 + adj->offset / BITS_PER_UNIT);
2954 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2955 off);
2956 base = TREE_OPERAND (base, 0);
2957 }
2958 else
2959 {
2960 off = build_int_cst (adj->alias_ptr_type,
2961 base_offset
2962 + adj->offset / BITS_PER_UNIT);
2963 base = build_fold_addr_expr (base);
2964 }
2965 }
2966
2967 if (!adj->by_ref)
2968 {
2969 tree type = adj->type;
2970 unsigned int align;
2971 unsigned HOST_WIDE_INT misalign;
2972
2973 get_pointer_alignment_1 (base, &align, &misalign);
2974 misalign += (tree_to_double_int (off)
2975 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2976 * BITS_PER_UNIT);
2977 misalign = misalign & (align - 1);
2978 if (misalign != 0)
2979 align = (misalign & -misalign);
2980 if (align < TYPE_ALIGN (type))
2981 type = build_aligned_type (type, align);
2982 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2983 }
2984 else
2985 {
2986 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2987 expr = build_fold_addr_expr (expr);
2988 }
2989
2990 expr = force_gimple_operand_gsi (&gsi, expr,
2991 adj->by_ref
2992 || is_gimple_reg_type (adj->type),
2993 NULL, true, GSI_SAME_STMT);
2994 VEC_quick_push (tree, vargs, expr);
2995 }
2996 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
2997 {
2998 unsigned int ix;
2999 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3000 gimple def_temp;
3001
3002 arg = gimple_call_arg (stmt, adj->base_index);
3003 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3004 {
3005 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3006 continue;
3007 arg = fold_convert_loc (gimple_location (stmt),
3008 TREE_TYPE (origin), arg);
3009 }
3010 if (debug_args == NULL)
3011 debug_args = decl_debug_args_insert (callee_decl);
3012 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
3013 if (ddecl == origin)
3014 {
3015 ddecl = VEC_index (tree, *debug_args, ix + 1);
3016 break;
3017 }
3018 if (ddecl == NULL)
3019 {
3020 ddecl = make_node (DEBUG_EXPR_DECL);
3021 DECL_ARTIFICIAL (ddecl) = 1;
3022 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3023 DECL_MODE (ddecl) = DECL_MODE (origin);
3024
3025 VEC_safe_push (tree, gc, *debug_args, origin);
3026 VEC_safe_push (tree, gc, *debug_args, ddecl);
3027 }
3028 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
3029 stmt);
3030 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3031 }
3032 }
3033
3034 if (dump_file && (dump_flags & TDF_DETAILS))
3035 {
3036 fprintf (dump_file, "replacing stmt:");
3037 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3038 }
3039
3040 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3041 VEC_free (tree, heap, vargs);
3042 if (gimple_call_lhs (stmt))
3043 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3044
3045 gimple_set_block (new_stmt, gimple_block (stmt));
3046 if (gimple_has_location (stmt))
3047 gimple_set_location (new_stmt, gimple_location (stmt));
3048 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3049 gimple_call_copy_flags (new_stmt, stmt);
3050
3051 if (dump_file && (dump_flags & TDF_DETAILS))
3052 {
3053 fprintf (dump_file, "with stmt:");
3054 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3055 fprintf (dump_file, "\n");
3056 }
3057 gsi_replace (&gsi, new_stmt, true);
3058 if (cs)
3059 cgraph_set_call_stmt (cs, new_stmt);
3060 update_ssa (TODO_update_ssa);
3061 free_dominance_info (CDI_DOMINATORS);
3062 }
3063
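/* Continuing the hypothetical IPA-SRA illustration from above, a call

     f (&s);

   is rewritten by the function above into something like

     f (s.x);

   where the replacement argument is materialized as a MEM_REF built from
   the original actual argument and the adjustment's offset, and, when
   debug info is wanted, a debug bind is emitted so that the value of the
   removed parameter can still be inspected.  */
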
3064 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3065
3066 static bool
3067 index_in_adjustments_multiple_times_p (int base_index,
3068 ipa_parm_adjustment_vec adjustments)
3069 {
3070 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3071 bool one = false;
3072
3073 for (i = 0; i < len; i++)
3074 {
3075 struct ipa_parm_adjustment *adj;
3076 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3077
3078 if (adj->base_index == base_index)
3079 {
3080 if (one)
3081 return true;
3082 else
3083 one = true;
3084 }
3085 }
3086 return false;
3087 }
3088
3089
3090 /* Return adjustments that should have the same effect on function parameters
3091 and call arguments as if they were first changed according to adjustments in
3092 INNER and then by adjustments in OUTER. */
3093
3094 ipa_parm_adjustment_vec
3095 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3096 ipa_parm_adjustment_vec outer)
3097 {
3098 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3099 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3100 int removals = 0;
3101 ipa_parm_adjustment_vec adjustments, tmp;
3102
3103 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3104 for (i = 0; i < inlen; i++)
3105 {
3106 struct ipa_parm_adjustment *n;
3107 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3108
3109 if (n->remove_param)
3110 removals++;
3111 else
3112 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3113 }
3114
3115 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3116 for (i = 0; i < outlen; i++)
3117 {
3118 struct ipa_parm_adjustment r;
3119 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3120 outer, i);
3121 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3122 out->base_index);
3123
3124 memset (&r, 0, sizeof (r));
3125 gcc_assert (!in->remove_param);
3126 if (out->remove_param)
3127 {
3128 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3129 {
3130 r.remove_param = true;
3131 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3132 }
3133 continue;
3134 }
3135
3136 r.base_index = in->base_index;
3137 r.type = out->type;
3138
3139 /* FIXME: Create nonlocal value too. */
3140
3141 if (in->copy_param && out->copy_param)
3142 r.copy_param = true;
3143 else if (in->copy_param)
3144 r.offset = out->offset;
3145 else if (out->copy_param)
3146 r.offset = in->offset;
3147 else
3148 r.offset = in->offset + out->offset;
3149 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3150 }
3151
3152 for (i = 0; i < inlen; i++)
3153 {
3154 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3155 inner, i);
3156
3157 if (n->remove_param)
3158 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3159 }
3160
3161 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3162 return adjustments;
3163 }
3164
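/* A worked example with hypothetical adjustments: let INNER turn the
   original parameters (A, B) into (A, B') where B' is a piece of B at bit
   offset 32, and let OUTER then remove its first parameter and copy its
   second.  The combination removes A outright and keeps the piece of B at
   offset 32, expressed directly against the original signature; had OUTER
   also taken a piece rather than copying, the two offsets would have been
   summed by the r.offset arithmetic above.  */
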
3165 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3166 way, assuming they are meant to be applied to FNDECL. */
3167
3168 void
3169 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3170 tree fndecl)
3171 {
3172 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3173 bool first = true;
3174 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3175
3176 fprintf (file, "IPA param adjustments: ");
3177 for (i = 0; i < len; i++)
3178 {
3179 struct ipa_parm_adjustment *adj;
3180 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3181
3182 if (!first)
3183 fprintf (file, " ");
3184 else
3185 first = false;
3186
3187 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3188 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3189 if (adj->base)
3190 {
3191 fprintf (file, ", base: ");
3192 print_generic_expr (file, adj->base, 0);
3193 }
3194 if (adj->reduction)
3195 {
3196 fprintf (file, ", reduction: ");
3197 print_generic_expr (file, adj->reduction, 0);
3198 }
3199 if (adj->new_ssa_base)
3200 {
3201 fprintf (file, ", new_ssa_base: ");
3202 print_generic_expr (file, adj->new_ssa_base, 0);
3203 }
3204
3205 if (adj->copy_param)
3206 fprintf (file, ", copy_param");
3207 else if (adj->remove_param)
3208 fprintf (file, ", remove_param");
3209 else
3210 fprintf (file, ", offset %li", (long) adj->offset);
3211 if (adj->by_ref)
3212 fprintf (file, ", by_ref");
3213 print_node_brief (file, ", type: ", adj->type, 0);
3214 fprintf (file, "\n");
3215 }
3216 VEC_free (tree, heap, parms);
3217 }
3218
3219 /* Dump the linked list of aggregate replacement values AV to F. */
3220
3221 void
3222 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3223 {
3224 bool comma = false;
3225 fprintf (f, " Aggregate replacements:");
3226 for (; av; av = av->next)
3227 {
3228 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3229 av->index, av->offset);
3230 print_generic_expr (f, av->value, 0);
3231 comma = true;
3232 }
3233 fprintf (f, "\n");
3234 }
3235
3236 /* Stream out jump function JUMP_FUNC to OB. */
3237
3238 static void
3239 ipa_write_jump_function (struct output_block *ob,
3240 struct ipa_jump_func *jump_func)
3241 {
3242 struct ipa_agg_jf_item *item;
3243 struct bitpack_d bp;
3244 int i, count;
3245
3246 streamer_write_uhwi (ob, jump_func->type);
3247 switch (jump_func->type)
3248 {
3249 case IPA_JF_UNKNOWN:
3250 break;
3251 case IPA_JF_KNOWN_TYPE:
3252 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3253 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3254 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3255 break;
3256 case IPA_JF_CONST:
3257 gcc_assert (
3258 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3259 stream_write_tree (ob, jump_func->value.constant, true);
3260 break;
3261 case IPA_JF_PASS_THROUGH:
3262 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3263 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3264 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3265 bp = bitpack_create (ob->main_stream);
3266 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3267 streamer_write_bitpack (&bp);
3268 break;
3269 case IPA_JF_ANCESTOR:
3270 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3271 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3272 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3273 bp = bitpack_create (ob->main_stream);
3274 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3275 streamer_write_bitpack (&bp);
3276 break;
3277 }
3278
3279 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3280 streamer_write_uhwi (ob, count);
3281 if (count)
3282 {
3283 bp = bitpack_create (ob->main_stream);
3284 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3285 streamer_write_bitpack (&bp);
3286 }
3287
3288 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3289 {
3290 streamer_write_uhwi (ob, item->offset);
3291 stream_write_tree (ob, item->value, true);
3292 }
3293 }
3294
3295 /* Read in jump function JUMP_FUNC from IB. */
3296
3297 static void
3298 ipa_read_jump_function (struct lto_input_block *ib,
3299 struct ipa_jump_func *jump_func,
3300 struct data_in *data_in)
3301 {
3302 struct bitpack_d bp;
3303 int i, count;
3304
3305 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3306 switch (jump_func->type)
3307 {
3308 case IPA_JF_UNKNOWN:
3309 break;
3310 case IPA_JF_KNOWN_TYPE:
3311 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3312 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3313 jump_func->value.known_type.component_type = stream_read_tree (ib,
3314 data_in);
3315 break;
3316 case IPA_JF_CONST:
3317 jump_func->value.constant = stream_read_tree (ib, data_in);
3318 break;
3319 case IPA_JF_PASS_THROUGH:
3320 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3321 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3322 jump_func->value.pass_through.operation
3323 = (enum tree_code) streamer_read_uhwi (ib);
3324 bp = streamer_read_bitpack (ib);
3325 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3326 break;
3327 case IPA_JF_ANCESTOR:
3328 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3329 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3330 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3331 bp = streamer_read_bitpack (ib);
3332 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3333 break;
3334 }
3335
3336 count = streamer_read_uhwi (ib);
3337 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3338 if (count)
3339 {
3340 bp = streamer_read_bitpack (ib);
3341 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3342 }
3343 for (i = 0; i < count; i++)
3344 {
3345 struct ipa_agg_jf_item item;
3346 item.offset = streamer_read_uhwi (ib);
3347 item.value = stream_read_tree (ib, data_in);
3348 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3349 }
3350 }
3351
3352 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3353 relevant to indirect inlining to OB. */
3354
3355 static void
3356 ipa_write_indirect_edge_info (struct output_block *ob,
3357 struct cgraph_edge *cs)
3358 {
3359 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3360 struct bitpack_d bp;
3361
3362 streamer_write_hwi (ob, ii->param_index);
3363 streamer_write_hwi (ob, ii->offset);
3364 bp = bitpack_create (ob->main_stream);
3365 bp_pack_value (&bp, ii->polymorphic, 1);
3366 bp_pack_value (&bp, ii->agg_contents, 1);
3367 bp_pack_value (&bp, ii->by_ref, 1);
3368 streamer_write_bitpack (&bp);
3369
3370 if (ii->polymorphic)
3371 {
3372 streamer_write_hwi (ob, ii->otr_token);
3373 stream_write_tree (ob, ii->otr_type, true);
3374 }
3375 }
3376
3377 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3378 relevant to indirect inlining from IB. */
3379
3380 static void
3381 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3382 struct data_in *data_in ATTRIBUTE_UNUSED,
3383 struct cgraph_edge *cs)
3384 {
3385 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3386 struct bitpack_d bp;
3387
3388 ii->param_index = (int) streamer_read_hwi (ib);
3389 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3390 bp = streamer_read_bitpack (ib);
3391 ii->polymorphic = bp_unpack_value (&bp, 1);
3392 ii->agg_contents = bp_unpack_value (&bp, 1);
3393 ii->by_ref = bp_unpack_value (&bp, 1);
3394 if (ii->polymorphic)
3395 {
3396 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3397 ii->otr_type = stream_read_tree (ib, data_in);
3398 }
3399 }
3400
3401 /* Stream out NODE info to OB. */
3402
3403 static void
3404 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3405 {
3406 int node_ref;
3407 lto_symtab_encoder_t encoder;
3408 struct ipa_node_params *info = IPA_NODE_REF (node);
3409 int j;
3410 struct cgraph_edge *e;
3411 struct bitpack_d bp;
3412
3413 encoder = ob->decl_state->symtab_node_encoder;
3414 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3415 streamer_write_uhwi (ob, node_ref);
3416
3417 bp = bitpack_create (ob->main_stream);
3418 gcc_assert (info->uses_analysis_done
3419 || ipa_get_param_count (info) == 0);
3420 gcc_assert (!info->node_enqueued);
3421 gcc_assert (!info->ipcp_orig_node);
3422 for (j = 0; j < ipa_get_param_count (info); j++)
3423 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3424 streamer_write_bitpack (&bp);
3425 for (e = node->callees; e; e = e->next_callee)
3426 {
3427 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3428
3429 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3430 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3431 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3432 }
3433 for (e = node->indirect_calls; e; e = e->next_callee)
3434 {
3435 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3436
3437 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3438 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3439 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3440 ipa_write_indirect_edge_info (ob, e);
3441 }
3442 }
3443
3444 /* Stream in NODE info from IB. */
3445
3446 static void
3447 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3448 struct data_in *data_in)
3449 {
3450 struct ipa_node_params *info = IPA_NODE_REF (node);
3451 int k;
3452 struct cgraph_edge *e;
3453 struct bitpack_d bp;
3454
3455 ipa_initialize_node_params (node);
3456
3457 bp = streamer_read_bitpack (ib);
3458 if (ipa_get_param_count (info) != 0)
3459 info->uses_analysis_done = true;
3460 info->node_enqueued = false;
3461 for (k = 0; k < ipa_get_param_count (info); k++)
3462 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3463 for (e = node->callees; e; e = e->next_callee)
3464 {
3465 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3466 int count = streamer_read_uhwi (ib);
3467
3468 if (!count)
3469 continue;
3470 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3471
3472 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3473 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3474 }
3475 for (e = node->indirect_calls; e; e = e->next_callee)
3476 {
3477 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3478 int count = streamer_read_uhwi (ib);
3479
3480 if (count)
3481 {
3482 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3483 count);
3484 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3485 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3486 data_in);
3487 }
3488 ipa_read_indirect_edge_info (ib, data_in, e);
3489 }
3490 }
3491
3492 /* Write jump functions for the functions in the current LTO partition. */
3493
3494 void
3495 ipa_prop_write_jump_functions (void)
3496 {
3497 struct cgraph_node *node;
3498 struct output_block *ob;
3499 unsigned int count = 0;
3500 lto_symtab_encoder_iterator lsei;
3501 lto_symtab_encoder_t encoder;
3502
3504 if (!ipa_node_params_vector)
3505 return;
3506
3507 ob = create_output_block (LTO_section_jump_functions);
3508 encoder = ob->decl_state->symtab_node_encoder;
3509 ob->cgraph_node = NULL;
3510 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3511 lsei_next_function_in_partition (&lsei))
3512 {
3513 node = lsei_cgraph_node (lsei);
3514 if (cgraph_function_with_gimple_body_p (node)
3515 && IPA_NODE_REF (node) != NULL)
3516 count++;
3517 }
3518
3519 streamer_write_uhwi (ob, count);
3520
3521 /* Process all of the functions. */
3522 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3523 lsei_next_function_in_partition (&lsei))
3524 {
3525 node = lsei_cgraph_node (lsei);
3526 if (cgraph_function_with_gimple_body_p (node)
3527 && IPA_NODE_REF (node) != NULL)
3528 ipa_write_node_info (ob, node);
3529 }
3530 streamer_write_char_stream (ob->main_stream, 0);
3531 produce_asm (ob, NULL);
3532 destroy_output_block (ob);
3533 }
3534
3535 /* Read a section in file FILE_DATA of length LEN with data DATA. */
3536
3537 static void
3538 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3539 size_t len)
3540 {
3541 const struct lto_function_header *header =
3542 (const struct lto_function_header *) data;
3543 const int cfg_offset = sizeof (struct lto_function_header);
3544 const int main_offset = cfg_offset + header->cfg_size;
3545 const int string_offset = main_offset + header->main_size;
3546 struct data_in *data_in;
3547 struct lto_input_block ib_main;
3548 unsigned int i;
3549 unsigned int count;
3550
3551 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3552 header->main_size);
3553
3554 data_in =
3555 lto_data_in_create (file_data, (const char *) data + string_offset,
3556 header->string_size, NULL);
3557 count = streamer_read_uhwi (&ib_main);
3558
3559 for (i = 0; i < count; i++)
3560 {
3561 unsigned int index;
3562 struct cgraph_node *node;
3563 lto_symtab_encoder_t encoder;
3564
3565 index = streamer_read_uhwi (&ib_main);
3566 encoder = file_data->symtab_node_encoder;
3567 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3568 gcc_assert (node->analyzed);
3569 ipa_read_node_info (&ib_main, node, data_in);
3570 }
3571 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3572 len);
3573 lto_data_in_delete (data_in);
3574 }
3575
3576 /* Read ipcp jump functions. */
3577
3578 void
3579 ipa_prop_read_jump_functions (void)
3580 {
3581 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3582 struct lto_file_decl_data *file_data;
3583 unsigned int j = 0;
3584
3585 ipa_check_create_node_params ();
3586 ipa_check_create_edge_args ();
3587 ipa_register_cgraph_hooks ();
3588
3589 while ((file_data = file_data_vec[j++]))
3590 {
3591 size_t len;
3592 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3593
3594 if (data)
3595 ipa_prop_read_section (file_data, data, len);
3596 }
3597 }
3598
3599 /* After merging units, argument counts at call sites can mismatch and decl
3600    merging may have rendered parameter lists obsolete, so re-initialize the
3601    parameter information of every analyzed function.  */
3602
3603 void
3604 ipa_update_after_lto_read (void)
3605 {
3606 struct cgraph_node *node;
3607
3608 ipa_check_create_node_params ();
3609 ipa_check_create_edge_args ();
3610
3611 FOR_EACH_DEFINED_FUNCTION (node)
3612 if (node->analyzed)
3613 ipa_initialize_node_params (node);
3614 }
3615
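/* Write the aggregate value replacement chain for NODE into OB.  */
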
3616 static void
3617 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
3618 {
3619 int node_ref;
3620 unsigned int count = 0;
3621 lto_symtab_encoder_t encoder;
3622 struct ipa_agg_replacement_value *aggvals, *av;
3623
3624 aggvals = ipa_get_agg_replacements_for_node (node);
3625 encoder = ob->decl_state->symtab_node_encoder;
3626 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3627 streamer_write_uhwi (ob, node_ref);
3628
3629 for (av = aggvals; av; av = av->next)
3630 count++;
3631 streamer_write_uhwi (ob, count);
3632
3633 for (av = aggvals; av; av = av->next)
3634 {
3635 streamer_write_uhwi (ob, av->offset);
3636 streamer_write_uhwi (ob, av->index);
3637 stream_write_tree (ob, av->value, true);
3638 }
3639 }
3640
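/* Each chain is streamed as a uhwi reference to the node, a uhwi count of
   values, and then COUNT triples of (uhwi offset, uhwi index, tree value);
   the writer above and the reader below agree on this layout.  */
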
3641 /* Stream in the aggregate value replacement chain for NODE from IB. */
3642
3643 static void
3644 read_agg_replacement_chain (struct lto_input_block *ib,
3645 struct cgraph_node *node,
3646 struct data_in *data_in)
3647 {
3648 struct ipa_agg_replacement_value *aggvals = NULL;
3649 unsigned int count, i;
3650
3651 count = streamer_read_uhwi (ib);
3652 for (i = 0; i < count; i++)
3653 {
3654 struct ipa_agg_replacement_value *av;
3655
3656 av = ggc_alloc_ipa_agg_replacement_value ();
3657 av->offset = streamer_read_uhwi (ib);
3658 av->index = streamer_read_uhwi (ib);
3659 av->value = stream_read_tree (ib, data_in);
3660 av->next = aggvals;
3661 aggvals = av;
3662 }
3663 ipa_set_node_agg_value_chain (node, aggvals);
3664 }
3665
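/* Note that prepending in the loop above rebuilds the chain in reverse
   stream order; the consumers in this file (adjust_agg_replacement_values
   and ipcp_transform_function) walk the whole chain and do not depend on
   its ordering.  */
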
3666 /* Write all aggregate replacement chains for nodes in the current LTO
     partition. */
3667
3668 void
3669 ipa_prop_write_all_agg_replacement (void)
3670 {
3671 struct cgraph_node *node;
3672 struct output_block *ob;
3673 unsigned int count = 0;
3674 lto_symtab_encoder_iterator lsei;
3675 lto_symtab_encoder_t encoder;
3676
3677 if (!ipa_node_agg_replacements)
3678 return;
3679
3680 ob = create_output_block (LTO_section_ipcp_transform);
3681 encoder = ob->decl_state->symtab_node_encoder;
3682 ob->cgraph_node = NULL;
3683 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3684 lsei_next_function_in_partition (&lsei))
3685 {
3686 node = lsei_cgraph_node (lsei);
3687 if (cgraph_function_with_gimple_body_p (node)
3688 && ipa_get_agg_replacements_for_node (node) != NULL)
3689 count++;
3690 }
3691
3692 streamer_write_uhwi (ob, count);
3693
3694 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3695 lsei_next_function_in_partition (&lsei))
3696 {
3697 node = lsei_cgraph_node (lsei);
3698 if (cgraph_function_with_gimple_body_p (node)
3699 && ipa_get_agg_replacements_for_node (node) != NULL)
3700 write_agg_replacement_chain (ob, node);
3701 }
3702 streamer_write_char_stream (ob->main_stream, 0);
3703 produce_asm (ob, NULL);
3704 destroy_output_block (ob);
3705 }
3706
3707 /* Read an aggregate replacements section in file FILE_DATA of length LEN
3708    with data DATA.  */
3709
3710 static void
3711 read_replacements_section (struct lto_file_decl_data *file_data,
3712 const char *data,
3713 size_t len)
3714 {
3715 const struct lto_function_header *header =
3716 (const struct lto_function_header *) data;
3717 const int cfg_offset = sizeof (struct lto_function_header);
3718 const int main_offset = cfg_offset + header->cfg_size;
3719 const int string_offset = main_offset + header->main_size;
3720 struct data_in *data_in;
3721 struct lto_input_block ib_main;
3722 unsigned int i;
3723 unsigned int count;
3724
3725 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3726 header->main_size);
3727
3728 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
3729 header->string_size, NULL);
3730 count = streamer_read_uhwi (&ib_main);
3731
3732 for (i = 0; i < count; i++)
3733 {
3734 unsigned int index;
3735 struct cgraph_node *node;
3736 lto_symtab_encoder_t encoder;
3737
3738 index = streamer_read_uhwi (&ib_main);
3739 encoder = file_data->symtab_node_encoder;
3740 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3741 gcc_assert (node->analyzed);
3742 read_agg_replacement_chain (&ib_main, node, data_in);
3743 }
3744 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
3745 			 len);
3746 lto_data_in_delete (data_in);
3747 }
3748
3749 /* Read IPA-CP aggregate replacements. */
3750
3751 void
3752 ipa_prop_read_all_agg_replacement (void)
3753 {
3754 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3755 struct lto_file_decl_data *file_data;
3756 unsigned int j = 0;
3757
3758 while ((file_data = file_data_vec[j++]))
3759 {
3760 size_t len;
3761 const char *data = lto_get_section_data (file_data,
3762 LTO_section_ipcp_transform,
3763 NULL, &len);
3764 if (data)
3765 read_replacements_section (file_data, data, len);
3766 }
3767 }
3768
3769 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
3770 NODE. */
3771
3772 static void
3773 adjust_agg_replacement_values (struct cgraph_node *node,
3774 struct ipa_agg_replacement_value *aggval)
3775 {
3776 struct ipa_agg_replacement_value *v;
3777 int i, c = 0, d = 0, *adj;
3778
3779 if (!node->clone.combined_args_to_skip)
3780 return;
3781
3782 for (v = aggval; v; v = v->next)
3783 {
3784 gcc_assert (v->index >= 0);
3785 if (c < v->index)
3786 c = v->index;
3787 }
3788 c++;
3789
3790 adj = XALLOCAVEC (int, c);
3791 for (i = 0; i < c; i++)
3792 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
3793 {
3794 adj[i] = -1;
3795 d++;
3796 }
3797 else
3798 adj[i] = i - d;
3799
3800 for (v = aggval; v; v = v->next)
3801 v->index = adj[v->index];
3802 }
3803
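/* Worked example (illustrative only): if combined_args_to_skip contains
   just bit 1 and AGGVAL carries replacements with indices 0 and 2, then
   c == 3 and the adjustment vector is {0, -1, 1}: the replacement for
   former index 2 is remapped to index 1 while index 0 stays put.  */
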
3804 /* IPA-CP transformation phase: replace loads from aggregate parameters
3805    with the constant values recorded for NODE.  */
3806
3807 unsigned int
3808 ipcp_transform_function (struct cgraph_node *node)
3809 {
3810 VEC (ipa_param_descriptor_t, heap) *descriptors = NULL;
3811 struct param_analysis_info *parms_ainfo;
3812 struct ipa_agg_replacement_value *aggval;
3813 gimple_stmt_iterator gsi;
3814 basic_block bb;
3815 int param_count;
3816 bool cfg_changed = false, something_changed = false;
3817
3818 gcc_checking_assert (cfun);
3819 gcc_checking_assert (current_function_decl);
3820
3821 if (dump_file)
3822 fprintf (dump_file, "Modification phase of node %s/%i\n",
3823 cgraph_node_name (node), node->uid);
3824
3825 aggval = ipa_get_agg_replacements_for_node (node);
3826 if (!aggval)
3827 return 0;
3828 param_count = count_formal_params (node->symbol.decl);
3829 if (param_count == 0)
3830 return 0;
3831 adjust_agg_replacement_values (node, aggval);
3832 if (dump_file)
3833 ipa_dump_agg_replacement_values (dump_file, aggval);
3834 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
3835 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
3836 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
3837 descriptors, param_count);
3838 ipa_populate_param_decls (node, descriptors);
3839
3840 FOR_EACH_BB (bb)
3841 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3842 {
3843 struct ipa_agg_replacement_value *v;
3844 gimple stmt = gsi_stmt (gsi);
3845 tree rhs, val, t;
3846 HOST_WIDE_INT offset;
3847 int index;
3848 bool by_ref, vce;
3849
3850 if (!gimple_assign_load_p (stmt))
3851 continue;
3852 rhs = gimple_assign_rhs1 (stmt);
3853 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
3854 continue;
3855
3856 vce = false;
3857 t = rhs;
3858 while (handled_component_p (t))
3859 {
3860 /* V_C_E can do things like convert an array of integers to one
3861 bigger integer and similar things we do not handle below. */
3862 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
3863 {
3864 vce = true;
3865 break;
3866 }
3867 t = TREE_OPERAND (t, 0);
3868 }
3869 if (vce)
3870 continue;
3871
3872 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3873 rhs, &index, &offset, &by_ref))
3874 continue;
3875 for (v = aggval; v; v = v->next)
3876 if (v->index == index
3877 && v->offset == offset)
3878 break;
3879 if (!v)
3880 continue;
3881
3882 gcc_checking_assert (is_gimple_ip_invariant (v->value));
3883 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
3884 {
3885 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
3886 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
3887 else if (TYPE_SIZE (TREE_TYPE (rhs))
3888 == TYPE_SIZE (TREE_TYPE (v->value)))
3889 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
3890 else
3891 {
3892 if (dump_file)
3893 {
3894 fprintf (dump_file, " const ");
3895 print_generic_expr (dump_file, v->value, 0);
3896 fprintf (dump_file, " can't be converted to type of ");
3897 print_generic_expr (dump_file, rhs, 0);
3898 fprintf (dump_file, "\n");
3899 }
3900 continue;
3901 }
3902 }
3903 else
3904 val = v->value;
3905
3906 if (dump_file && (dump_flags & TDF_DETAILS))
3907 {
3908 fprintf (dump_file, "Modifying stmt:\n ");
3909 print_gimple_stmt (dump_file, stmt, 0, 0);
3910 }
3911 gimple_assign_set_rhs_from_tree (&gsi, val);
3912 update_stmt (stmt);
3913
3914 if (dump_file && (dump_flags & TDF_DETAILS))
3915 {
3916 fprintf (dump_file, "into:\n ");
3917 print_gimple_stmt (dump_file, stmt, 0, 0);
3918 fprintf (dump_file, "\n");
3919 }
3920
3921 something_changed = true;
3922 if (maybe_clean_eh_stmt (stmt)
3923 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3924 cfg_changed = true;
3925 }
3926
3927 VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
3928 node->uid, NULL);
3929 free_parms_ainfo (parms_ainfo, param_count);
3930 VEC_free (ipa_param_descriptor_t, heap, descriptors);
3931
3932 if (!something_changed)
3933 return 0;
3934 else if (cfg_changed)
3935 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
3936 else
3937 return TODO_update_ssa_only_virtuals;
3938 }
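
/* Illustrative effect of the transformation above, on hypothetical GIMPLE:
   if AGGVAL records that the aggregate passed through parameter 0 holds the
   constant 4 at offset 0, a matching load such as

     tmp_1 = p_2(D)->f;

   is rewritten into

     tmp_1 = 4;

   possibly after a NOP_EXPR or VIEW_CONVERT_EXPR conversion of the constant,
   and EH edges made dead by the change are purged.  */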