1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "langhooks.h"
26 #include "ggc.h"
27 #include "target.h"
28 #include "cgraph.h"
29 #include "ipa-prop.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
33 #include "ipa-inline.h"
34 #include "gimple.h"
35 #include "flags.h"
36 #include "diagnostic.h"
37 #include "gimple-pretty-print.h"
38 #include "lto-streamer.h"
39 #include "data-streamer.h"
40 #include "tree-streamer.h"
41 #include "params.h"
42
43 /* Intermediate information about a parameter that is only useful during the
44 run of ipa_analyze_node and is not kept afterwards. */
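/* A note on the flags in the structure below (an interpretation added for
   clarity): they act as one-way caches shared by the analysis routines.
   Once a modification of a parameter (or of the data it points to) is
   discovered, the corresponding flag is set and all later queries are
   answered negatively without walking the virtual operands again.  */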
45
46 struct param_analysis_info
47 {
48 bool parm_modified, ref_modified, pt_modified;
49 bitmap parm_visited_statements, pt_visited_statements;
50 };
51
52 /* Vector where the parameter infos are actually stored. */
53 VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
54 /* Vector of known aggregate values in cloned nodes. */
55 VEC (ipa_agg_replacement_value_p, gc) *ipa_node_agg_replacements;
56 /* Vector where the edge argument infos are actually stored. */
57 VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
58
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
61 static struct cgraph_node_hook_list *node_removal_hook_holder;
62 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
63 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
64 static struct cgraph_node_hook_list *function_insertion_hook_holder;
65
66 /* Return index of the formal whose tree is PTREE in the function described
67 by parameter DESCRIPTORS. */
68
69 static int
70 ipa_get_param_decl_index_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
71 tree ptree)
72 {
73 int i, count;
74
75 count = VEC_length (ipa_param_descriptor_t, descriptors);
76 for (i = 0; i < count; i++)
77 if (VEC_index (ipa_param_descriptor_t, descriptors, i).decl == ptree)
78 return i;
79
80 return -1;
81 }
82
83 /* Return index of the formal whose tree is PTREE in the function which
84 corresponds to INFO. */
85
86 int
87 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
88 {
89 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
90 }
91
92 /* Populate the param_decl fields in parameter DESCRIPTORS that correspond
93 to NODE. */
94
95 static void
96 ipa_populate_param_decls (struct cgraph_node *node,
97 VEC (ipa_param_descriptor_t, heap) *descriptors)
98 {
99 tree fndecl;
100 tree fnargs;
101 tree parm;
102 int param_num;
103
104 fndecl = node->symbol.decl;
105 fnargs = DECL_ARGUMENTS (fndecl);
106 param_num = 0;
107 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
108 {
109 VEC_index (ipa_param_descriptor_t, descriptors, param_num).decl = parm;
110 param_num++;
111 }
112 }
113
114 /* Return how many formal parameters FNDECL has. */
115
116 static inline int
117 count_formal_params (tree fndecl)
118 {
119 tree parm;
120 int count = 0;
121
122 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
123 count++;
124
125 return count;
126 }
127
128 /* Initialize the ipa_node_params structure associated with NODE by counting
129 the function parameters, creating the descriptors and populating their
130 param_decls. */
131
132 void
133 ipa_initialize_node_params (struct cgraph_node *node)
134 {
135 struct ipa_node_params *info = IPA_NODE_REF (node);
136
137 if (!info->descriptors)
138 {
139 int param_count;
140
141 param_count = count_formal_params (node->symbol.decl);
142 if (param_count)
143 {
144 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
145 info->descriptors, param_count);
146 ipa_populate_param_decls (node, info->descriptors);
147 }
148 }
149 }
150
151 /* Print the jump functions associated with call graph edge CS to file F. */
152
153 static void
154 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
155 {
156 int i, count;
157
158 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
159 for (i = 0; i < count; i++)
160 {
161 struct ipa_jump_func *jump_func;
162 enum jump_func_type type;
163
164 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
165 type = jump_func->type;
166
167 fprintf (f, " param %d: ", i);
168 if (type == IPA_JF_UNKNOWN)
169 fprintf (f, "UNKNOWN\n");
170 else if (type == IPA_JF_KNOWN_TYPE)
171 {
172 fprintf (f, "KNOWN TYPE: base ");
173 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
174 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
175 jump_func->value.known_type.offset);
176 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
177 fprintf (f, "\n");
178 }
179 else if (type == IPA_JF_CONST)
180 {
181 tree val = jump_func->value.constant;
182 fprintf (f, "CONST: ");
183 print_generic_expr (f, val, 0);
184 if (TREE_CODE (val) == ADDR_EXPR
185 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
186 {
187 fprintf (f, " -> ");
188 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
189 0);
190 }
191 fprintf (f, "\n");
192 }
193 else if (type == IPA_JF_PASS_THROUGH)
194 {
195 fprintf (f, "PASS THROUGH: ");
196 fprintf (f, "%d, op %s",
197 jump_func->value.pass_through.formal_id,
198 tree_code_name[(int)
199 jump_func->value.pass_through.operation]);
200 if (jump_func->value.pass_through.operation != NOP_EXPR)
201 {
202 fprintf (f, " ");
203 print_generic_expr (f,
204 jump_func->value.pass_through.operand, 0);
205 }
206 if (jump_func->value.pass_through.agg_preserved)
207 fprintf (f, ", agg_preserved");
208 fprintf (f, "\n");
209 }
210 else if (type == IPA_JF_ANCESTOR)
211 {
212 fprintf (f, "ANCESTOR: ");
213 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
214 jump_func->value.ancestor.formal_id,
215 jump_func->value.ancestor.offset);
216 print_generic_expr (f, jump_func->value.ancestor.type, 0);
217 if (jump_func->value.ancestor.agg_preserved)
218 fprintf (f, ", agg_preserved");
219 fprintf (f, "\n");
220 }
221
222 if (jump_func->agg.items)
223 {
224 struct ipa_agg_jf_item *item;
225 int j;
226
227 fprintf (f, " Aggregate passed by %s:\n",
228 jump_func->agg.by_ref ? "reference" : "value");
229 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
230 j, item)
231 {
232 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
233 item->offset);
234 if (TYPE_P (item->value))
235 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
236 tree_low_cst (TYPE_SIZE (item->value), 1));
237 else
238 {
239 fprintf (f, "cst: ");
240 print_generic_expr (f, item->value, 0);
241 }
242 fprintf (f, "\n");
243 }
244 }
245 }
246 }
247
248
249 /* Print the jump functions of all arguments on all call graph edges going from
250 NODE to file F. */
251
252 void
253 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
254 {
255 struct cgraph_edge *cs;
256 int i;
257
258 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
259 for (cs = node->callees; cs; cs = cs->next_callee)
260 {
261 if (!ipa_edge_args_info_available_for_edge_p (cs))
262 continue;
263
264 fprintf (f, " callsite %s/%i -> %s/%i : \n",
265 xstrdup (cgraph_node_name (node)), node->uid,
266 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
267 ipa_print_node_jump_functions_for_edge (f, cs);
268 }
269
270 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
271 {
272 if (!ipa_edge_args_info_available_for_edge_p (cs))
273 continue;
274
275 if (cs->call_stmt)
276 {
277 fprintf (f, " indirect callsite %d for stmt ", i);
278 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
279 }
280 else
281 fprintf (f, " indirect callsite %d :\n", i);
282 ipa_print_node_jump_functions_for_edge (f, cs);
283
284 }
285 }
286
287 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
288
289 void
290 ipa_print_all_jump_functions (FILE *f)
291 {
292 struct cgraph_node *node;
293
294 fprintf (f, "\nJump functions:\n");
295 FOR_EACH_FUNCTION (node)
296 {
297 ipa_print_node_jump_functions (f, node);
298 }
299 }
300
301 /* Worker for prune_expression_for_jf. */
302
303 static tree
304 prune_expression_for_jf_1 (tree *tp, int *walk_subtrees, void *)
305 {
306 if (EXPR_P (*tp))
307 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
308 else
309 *walk_subtrees = 0;
310 return NULL_TREE;
311 }
312
313 /* Return the expression tree EXP unshared and with location stripped off. */
314
315 static tree
316 prune_expression_for_jf (tree exp)
317 {
318 if (EXPR_P (exp))
319 {
320 exp = unshare_expr (exp);
321 walk_tree (&exp, prune_expression_for_jf_1, NULL, NULL);
322 }
323 return exp;
324 }
325
326 /* Set JFUNC to be a known type jump function. */
327
328 static void
329 ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
330 tree base_type, tree component_type)
331 {
332 jfunc->type = IPA_JF_KNOWN_TYPE;
333 jfunc->value.known_type.offset = offset;
334 jfunc->value.known_type.base_type = base_type;
335 jfunc->value.known_type.component_type = component_type;
336 }
337
338 /* Set JFUNC to be a constant jump function. */
339
340 static void
341 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
342 {
343 /* prune_expression_for_jf unshares the expression and strips its location
344 information, so nothing more needs to be done here.  */
346 jfunc->type = IPA_JF_CONST;
347 jfunc->value.constant = prune_expression_for_jf (constant);
348 }
349
350 /* Set JFUNC to be a simple pass-through jump function. */
351 static void
352 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
353 bool agg_preserved)
354 {
355 jfunc->type = IPA_JF_PASS_THROUGH;
356 jfunc->value.pass_through.operand = NULL_TREE;
357 jfunc->value.pass_through.formal_id = formal_id;
358 jfunc->value.pass_through.operation = NOP_EXPR;
359 jfunc->value.pass_through.agg_preserved = agg_preserved;
360 }
361
362 /* Set JFUNC to be an arithmetic pass through jump function. */
363
364 static void
365 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
366 tree operand, enum tree_code operation)
367 {
368 jfunc->type = IPA_JF_PASS_THROUGH;
369 jfunc->value.pass_through.operand = prune_expression_for_jf (operand);
370 jfunc->value.pass_through.formal_id = formal_id;
371 jfunc->value.pass_through.operation = operation;
372 jfunc->value.pass_through.agg_preserved = false;
373 }
374
375 /* Set JFUNC to be an ancestor jump function. */
376
377 static void
378 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
379 tree type, int formal_id, bool agg_preserved)
380 {
381 jfunc->type = IPA_JF_ANCESTOR;
382 jfunc->value.ancestor.formal_id = formal_id;
383 jfunc->value.ancestor.offset = offset;
384 jfunc->value.ancestor.type = type;
385 jfunc->value.ancestor.agg_preserved = agg_preserved;
386 }
387
388 /* Structure to be passed in between detect_type_change and
389 check_stmt_for_type_change. */
390
391 struct type_change_info
392 {
393 /* Offset into the object where there is the virtual method pointer we are
394 looking for. */
395 HOST_WIDE_INT offset;
396 /* The declaration or SSA_NAME pointer of the base that we are checking for
397 type change. */
398 tree object;
399 /* If we actually can tell the type that the object has changed to, it is
400 stored in this field. Otherwise it remains NULL_TREE. */
401 tree known_current_type;
402 /* Set to true if dynamic type change has been detected. */
403 bool type_maybe_changed;
404 /* Set to true if multiple types have been encountered. known_current_type
405 must be disregarded in that case. */
406 bool multiple_types_encountered;
407 };
408
409 /* Return true if STMT can modify a virtual method table pointer.
410
411 This function makes special assumptions about both constructors and
412 destructors which are all the functions that are allowed to alter the VMT
413 pointers. It assumes that destructors begin with assignment into all VMT
414 pointers and that constructors essentially look in the following way:
415
416 1) The very first thing they do is that they call constructors of ancestor
417 sub-objects that have them.
418
419 2) Then the VMT pointers of this object and all its ancestors are set to
420 new values corresponding to the type associated with the constructor.
421
422 3) Only afterwards, other stuff such as constructor of member sub-objects
423 and the code written by the user is run. Only this may include calling
424 virtual functions, directly or indirectly.
425
426 There is no way to call a constructor of an ancestor sub-object in any
427 other way.
428
429 This means that we do not have to care whether constructors get the correct
430 type information because they will always change it (in fact, if we define
431 the type to be given by the VMT pointer, it is undefined).
432
433 The most important fact to derive from the above is that if, for some
434 statement in section 3, we try to detect whether the dynamic type has
435 changed, we can safely ignore all calls as we examine the function body
436 backwards until we reach statements in section 2 because these calls cannot
437 be ancestor constructors or destructors (if the input is not bogus) and so
438 do not change the dynamic type (this holds true only for automatically
439 allocated objects but at the moment we devirtualize only these). We then
440 must detect that statements in section 2 change the dynamic type and can try
441 to derive the new type. That is enough and we can stop, we will never see
442 the calls into constructors of sub-objects in this code. Therefore we can
443 safely ignore all call statements that we traverse.
444 */
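/* To make the three-phase constructor behavior described above concrete,
   consider this purely illustrative C++ fragment (the names A, B and _ZTV1B
   are made up):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()    // 1) A::A () runs first and stores A's VMT pointer,
     {          // 2) the prologue then stores &_ZTV1B into the VMT pointer,
       f ();    // 3) only after that may user code call virtual functions.
     }
*/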
445
446 static bool
447 stmt_may_be_vtbl_ptr_store (gimple stmt)
448 {
449 if (is_gimple_call (stmt))
450 return false;
451 else if (is_gimple_assign (stmt))
452 {
453 tree lhs = gimple_assign_lhs (stmt);
454
455 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
456 {
457 if (flag_strict_aliasing
458 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
459 return false;
460
461 if (TREE_CODE (lhs) == COMPONENT_REF
462 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
463 return false;
464 /* In the future we might want to use get_base_ref_and_offset to find
465 if there is a field corresponding to the offset and if so, proceed
466 almost like if it was a component ref. */
467 }
468 }
469 return true;
470 }
471
472 /* If STMT can be proved to be an assignment to the virtual method table
473 pointer of the object described by TCI and the type associated with the new
474 table can be identified, return that type. Otherwise return NULL_TREE. */
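/* Schematically, the statement this function matches looks like the
   following illustrative dump (the names are made up):

     this_2(D)->_vptr.A = &_ZTV1B[2];

   and the new dynamic type, B, is recovered as the DECL_CONTEXT of the
   virtual table declaration _ZTV1B.  */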
475
476 static tree
477 extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
478 {
479 HOST_WIDE_INT offset, size, max_size;
480 tree lhs, rhs, base;
481
482 if (!gimple_assign_single_p (stmt))
483 return NULL_TREE;
484
485 lhs = gimple_assign_lhs (stmt);
486 rhs = gimple_assign_rhs1 (stmt);
487 if (TREE_CODE (lhs) != COMPONENT_REF
488 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
489 || TREE_CODE (rhs) != ADDR_EXPR)
490 return NULL_TREE;
491 rhs = get_base_address (TREE_OPERAND (rhs, 0));
492 if (!rhs
493 || TREE_CODE (rhs) != VAR_DECL
494 || !DECL_VIRTUAL_P (rhs))
495 return NULL_TREE;
496
497 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
498 if (offset != tci->offset
499 || size != POINTER_SIZE
500 || max_size != POINTER_SIZE)
501 return NULL_TREE;
502 if (TREE_CODE (base) == MEM_REF)
503 {
504 if (TREE_CODE (tci->object) != MEM_REF
505 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
506 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
507 TREE_OPERAND (base, 1)))
508 return NULL_TREE;
509 }
510 else if (tci->object != base)
511 return NULL_TREE;
512
513 return DECL_CONTEXT (rhs);
514 }
515
516 /* Callback of walk_aliased_vdefs and a helper function for
517 detect_type_change to check whether a particular statement may modify
518 the virtual table pointer, and if possible also determine the new type of
519 the (sub-)object. It stores its result into DATA, which points to a
520 type_change_info structure. */
521
522 static bool
523 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
524 {
525 gimple stmt = SSA_NAME_DEF_STMT (vdef);
526 struct type_change_info *tci = (struct type_change_info *) data;
527
528 if (stmt_may_be_vtbl_ptr_store (stmt))
529 {
530 tree type;
531 type = extr_type_from_vtbl_ptr_store (stmt, tci);
532 if (tci->type_maybe_changed
533 && type != tci->known_current_type)
534 tci->multiple_types_encountered = true;
535 tci->known_current_type = type;
536 tci->type_maybe_changed = true;
537 return true;
538 }
539 else
540 return false;
541 }
542
543
544
545 /* Like detect_type_change but with extra argument COMP_TYPE which will become
546 the component type part of the new JFUNC if a dynamic type change is
547 detected and the new base type is identified. */
548
549 static bool
550 detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
551 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
552 {
553 struct type_change_info tci;
554 ao_ref ao;
555
556 gcc_checking_assert (DECL_P (arg)
557 || TREE_CODE (arg) == MEM_REF
558 || handled_component_p (arg));
559 /* Const calls cannot call virtual methods through VMT and so type changes do
560 not matter. */
561 if (!flag_devirtualize || !gimple_vuse (call))
562 return false;
563
564 ao_ref_init (&ao, arg);
565 ao.base = base;
566 ao.offset = offset;
567 ao.size = POINTER_SIZE;
568 ao.max_size = ao.size;
569
570 tci.offset = offset;
571 tci.object = get_base_address (arg);
572 tci.known_current_type = NULL_TREE;
573 tci.type_maybe_changed = false;
574 tci.multiple_types_encountered = false;
575
576 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
577 &tci, NULL);
578 if (!tci.type_maybe_changed)
579 return false;
580
581 if (!tci.known_current_type
582 || tci.multiple_types_encountered
583 || offset != 0)
584 jfunc->type = IPA_JF_UNKNOWN;
585 else
586 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
587
588 return true;
589 }
590
591 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
592 looking for assignments to its virtual table pointer. If it has, return true
593 and fill in the jump function JFUNC with relevant type information or set it
594 to unknown. ARG is the object itself (not a pointer to it, unless
595 dereferenced). BASE is the base of the memory access as returned by
596 get_ref_base_and_extent, as is the offset. */
597
598 static bool
599 detect_type_change (tree arg, tree base, gimple call,
600 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
601 {
602 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
603 }
604
605 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
606 SSA name (its dereference will become the base and the offset is assumed to
607 be zero). */
608
609 static bool
610 detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
611 {
612 tree comp_type;
613
614 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
615 if (!flag_devirtualize
616 || !POINTER_TYPE_P (TREE_TYPE (arg))
617 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
618 return false;
619
620 comp_type = TREE_TYPE (TREE_TYPE (arg));
621 arg = build2 (MEM_REF, ptr_type_node, arg,
622 build_int_cst (ptr_type_node, 0));
623
624 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
625 }
626
627 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
628 boolean variable pointed to by DATA. */
629
630 static bool
631 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
632 void *data)
633 {
634 bool *b = (bool *) data;
635 *b = true;
636 return true;
637 }
638
639 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
640 a value known not to be modified in this function before reaching the
641 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
642 information about the parameter. */
643
644 static bool
645 parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
646 gimple stmt, tree parm_load)
647 {
648 bool modified = false;
649 bitmap *visited_stmts;
650 ao_ref refd;
651
652 if (parm_ainfo && parm_ainfo->parm_modified)
653 return false;
654
655 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
656 ao_ref_init (&refd, parm_load);
657 /* We can cache visited statements only when parm_ainfo is available and when
658 we are looking at a naked load of the whole parameter. */
659 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
660 visited_stmts = NULL;
661 else
662 visited_stmts = &parm_ainfo->parm_visited_statements;
663 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
664 visited_stmts);
665 if (parm_ainfo && modified)
666 parm_ainfo->parm_modified = true;
667 return !modified;
668 }
669
670 /* If STMT is an assignment that loads a value from a parameter declaration
671 which has not been modified, return the index of that parameter in
672 ipa_node_params. Otherwise return -1. */
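/* For illustration, if parameter "a" is not a gimple register, the load

     a.0_2 = a;

   returns the index of "a", provided the analysis can show that "a" could
   not have been modified before this statement.  */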
673
674 static int
675 load_from_unmodified_param (VEC (ipa_param_descriptor_t, heap) *descriptors,
676 struct param_analysis_info *parms_ainfo,
677 gimple stmt)
678 {
679 int index;
680 tree op1;
681
682 if (!gimple_assign_single_p (stmt))
683 return -1;
684
685 op1 = gimple_assign_rhs1 (stmt);
686 if (TREE_CODE (op1) != PARM_DECL)
687 return -1;
688
689 index = ipa_get_param_decl_index_1 (descriptors, op1);
690 if (index < 0
691 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
692 : NULL, stmt, op1))
693 return -1;
694
695 return index;
696 }
697
698 /* Return true if memory reference REF loads data that are known to be
699 unmodified in this function before reaching statement STMT. PARM_AINFO, if
700 non-NULL, is a pointer to a structure containing temporary information about
701 PARM. */
702
703 static bool
704 parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
705 gimple stmt, tree ref)
706 {
707 bool modified = false;
708 ao_ref refd;
709
710 gcc_checking_assert (gimple_vuse (stmt));
711 if (parm_ainfo && parm_ainfo->ref_modified)
712 return false;
713
714 ao_ref_init (&refd, ref);
715 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
716 NULL);
717 if (parm_ainfo && modified)
718 parm_ainfo->ref_modified = true;
719 return !modified;
720 }
721
722 /* Return true if the data pointed to by PARM is known to be unmodified in this
723 function before reaching call statement CALL into which it is passed.
724 PARM_AINFO is a pointer to a structure containing temporary information
725 about PARM. */
726
727 static bool
728 parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
729 gimple call, tree parm)
730 {
731 bool modified = false;
732 ao_ref refd;
733
734 /* It's unnecessary to calculate anything about memory contents for a const
735 function because it is not going to use it. But do not cache the result
736 either. Also, no such calculations for non-pointers. */
737 if (!gimple_vuse (call)
738 || !POINTER_TYPE_P (TREE_TYPE (parm)))
739 return false;
740
741 if (parm_ainfo->pt_modified)
742 return false;
743
744 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
745 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
746 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
747 if (modified)
748 parm_ainfo->pt_modified = true;
749 return !modified;
750 }
751
752 /* Return true if we can prove that OP is a memory reference loading unmodified
753 data from an aggregate passed as a parameter and if the aggregate is passed
754 by reference, that the alias type of the load corresponds to the type of the
755 formal parameter (so that we can rely on this type for TBAA in callers).
756 INFO and PARMS_AINFO describe parameters of the current function (but the
757 latter can be NULL), STMT is the load statement. If function returns true,
758 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
759 within the aggregate and whether it is a load from a value passed by
760 reference respectively. */
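/* For illustration (the names are made up), given a pointer parameter p,
   a load such as

     tmp_3 = p_1(D)->fld;

   sets *INDEX_P to the index of p, *OFFSET_P to the bit offset of fld within
   the pointed-to record and *BY_REF_P to true, provided the pointed-to data
   has not been modified.  */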
761
762 static bool
763 ipa_load_from_parm_agg_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
764 struct param_analysis_info *parms_ainfo, gimple stmt,
765 tree op, int *index_p, HOST_WIDE_INT *offset_p,
766 bool *by_ref_p)
767 {
768 int index;
769 HOST_WIDE_INT size, max_size;
770 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
771
772 if (max_size == -1 || max_size != size || *offset_p < 0)
773 return false;
774
775 if (DECL_P (base))
776 {
777 int index = ipa_get_param_decl_index_1 (descriptors, base);
778 if (index >= 0
779 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
780 : NULL, stmt, op))
781 {
782 *index_p = index;
783 *by_ref_p = false;
784 return true;
785 }
786 return false;
787 }
788
789 if (TREE_CODE (base) != MEM_REF
790 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
791 || !integer_zerop (TREE_OPERAND (base, 1)))
792 return false;
793
794 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
795 {
796 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
797 index = ipa_get_param_decl_index_1 (descriptors, parm);
798 }
799 else
800 {
801 /* This branch catches situations where a pointer parameter is not a
802 gimple register, for example:
803
804 void hip7(S*) (struct S * p)
805 {
806 void (*<T2e4>) (struct S *) D.1867;
807 struct S * p.1;
808
809 <bb 2>:
810 p.1_1 = p;
811 D.1867_2 = p.1_1->f;
812 D.1867_2 ();
813 gdp = &p;
814 */
815
816 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
817 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
818 }
819
820 if (index >= 0
821 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
822 stmt, op))
823 {
824 *index_p = index;
825 *by_ref_p = true;
826 return true;
827 }
828 return false;
829 }
830
831 /* Just like the previous function, just without the param_analysis_info
832 pointer, for users outside of this file. */
833
834 bool
835 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
836 tree op, int *index_p, HOST_WIDE_INT *offset_p,
837 bool *by_ref_p)
838 {
839 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
840 offset_p, by_ref_p);
841 }
842
843 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
844 of an assignment statement STMT, try to determine whether we are actually
845 handling any of the following cases and construct an appropriate jump
846 function into JFUNC if so:
847
848 1) The passed value is loaded from a formal parameter which is not a gimple
849 register (most probably because it is addressable; the value itself has to
850 be scalar) and we can guarantee the value has not changed. This case can
851 therefore be described by a simple pass-through jump function. For example:
852
853 foo (int a)
854 {
855 int a.0;
856
857 a.0_2 = a;
858 bar (a.0_2);
859
860 2) The passed value can be described by a simple arithmetic pass-through
861 jump function. E.g.
862
863 foo (int a)
864 {
865 int D.2064;
866
867 D.2064_4 = a.1(D) + 4;
868 bar (D.2064_4);
869
870 This case can also occur in combination with the previous one, e.g.:
871
872 foo (int a, int z)
873 {
874 int a.0;
875 int D.2064;
876
877 a.0_3 = a;
878 D.2064_4 = a.0_3 + 4;
879 foo (D.2064_4);
880
881 3) The passed value is an address of an object within another one (which
882 is also passed by reference). Such situations are described by an ancestor
883 jump function and describe situations such as:
884
885 B::foo() (struct B * const this)
886 {
887 struct A * D.1845;
888
889 D.1845_2 = &this_1(D)->D.1748;
890 A::bar (D.1845_2);
891
892 INFO is the structure describing individual parameters that is used across
893 different stages of IPA optimizations. PARMS_AINFO contains the information
894 that is only needed for intraprocedural analysis. */
895
896 static void
897 compute_complex_assign_jump_func (struct ipa_node_params *info,
898 struct param_analysis_info *parms_ainfo,
899 struct ipa_jump_func *jfunc,
900 gimple call, gimple stmt, tree name)
901 {
902 HOST_WIDE_INT offset, size, max_size;
903 tree op1, tc_ssa, base, ssa;
904 int index;
905
906 op1 = gimple_assign_rhs1 (stmt);
907
908 if (TREE_CODE (op1) == SSA_NAME)
909 {
910 if (SSA_NAME_IS_DEFAULT_DEF (op1))
911 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
912 else
913 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
914 SSA_NAME_DEF_STMT (op1));
915 tc_ssa = op1;
916 }
917 else
918 {
919 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
920 tc_ssa = gimple_assign_lhs (stmt);
921 }
922
923 if (index >= 0)
924 {
925 tree op2 = gimple_assign_rhs2 (stmt);
926
927 if (op2)
928 {
929 if (!is_gimple_ip_invariant (op2)
930 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
931 && !useless_type_conversion_p (TREE_TYPE (name),
932 TREE_TYPE (op1))))
933 return;
934
935 ipa_set_jf_arith_pass_through (jfunc, index, op2,
936 gimple_assign_rhs_code (stmt));
937 }
938 else if (gimple_assign_single_p (stmt)
939 && !detect_type_change_ssa (tc_ssa, call, jfunc))
940 {
941 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
942 call, tc_ssa);
943 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
944 }
945 return;
946 }
947
948 if (TREE_CODE (op1) != ADDR_EXPR)
949 return;
950 op1 = TREE_OPERAND (op1, 0);
951 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
952 return;
953 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
954 if (TREE_CODE (base) != MEM_REF
955 /* If this is a varying address, punt. */
956 || max_size == -1
957 || max_size != size)
958 return;
959 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
960 ssa = TREE_OPERAND (base, 0);
961 if (TREE_CODE (ssa) != SSA_NAME
962 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
963 || offset < 0)
964 return;
965
966 /* Dynamic types are changed only in constructors and destructors. */
967 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
968 if (index >= 0
969 && !detect_type_change (op1, base, call, jfunc, offset))
970 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
971 parm_ref_data_pass_through_p (&parms_ainfo[index],
972 call, ssa));
973 }
974
975 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
976 it looks like:
977
978 iftmp.1_3 = &obj_2(D)->D.1762;
979
980 The base of the MEM_REF must be a default definition SSA NAME of a
981 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
982 whole MEM_REF expression is returned and the offset calculated from any
983 handled components and the MEM_REF itself is stored into *OFFSET. The whole
984 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
985
986 static tree
987 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
988 {
989 HOST_WIDE_INT size, max_size;
990 tree expr, parm, obj;
991
992 if (!gimple_assign_single_p (assign))
993 return NULL_TREE;
994 expr = gimple_assign_rhs1 (assign);
995
996 if (TREE_CODE (expr) != ADDR_EXPR)
997 return NULL_TREE;
998 expr = TREE_OPERAND (expr, 0);
999 obj = expr;
1000 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1001
1002 if (TREE_CODE (expr) != MEM_REF
1003 /* If this is a varying address, punt. */
1004 || max_size == -1
1005 || max_size != size
1006 || *offset < 0)
1007 return NULL_TREE;
1008 parm = TREE_OPERAND (expr, 0);
1009 if (TREE_CODE (parm) != SSA_NAME
1010 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1011 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1012 return NULL_TREE;
1013
1014 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1015 *obj_p = obj;
1016 return expr;
1017 }
1018
1019
1020 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1021 statement PHI, try to find out whether NAME is in fact a
1022 multiple-inheritance typecast from a descendant into an ancestor of a formal
1023 parameter and thus can be described by an ancestor jump function and if so,
1024 write the appropriate function into JFUNC.
1025
1026 Essentially we want to match the following pattern:
1027
1028 if (obj_2(D) != 0B)
1029 goto <bb 3>;
1030 else
1031 goto <bb 4>;
1032
1033 <bb 3>:
1034 iftmp.1_3 = &obj_2(D)->D.1762;
1035
1036 <bb 4>:
1037 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1038 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1039 return D.1879_6; */
1040
1041 static void
1042 compute_complex_ancestor_jump_func (struct ipa_node_params *info,
1043 struct param_analysis_info *parms_ainfo,
1044 struct ipa_jump_func *jfunc,
1045 gimple call, gimple phi)
1046 {
1047 HOST_WIDE_INT offset;
1048 gimple assign, cond;
1049 basic_block phi_bb, assign_bb, cond_bb;
1050 tree tmp, parm, expr, obj;
1051 int index, i;
1052
1053 if (gimple_phi_num_args (phi) != 2)
1054 return;
1055
1056 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1057 tmp = PHI_ARG_DEF (phi, 0);
1058 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1059 tmp = PHI_ARG_DEF (phi, 1);
1060 else
1061 return;
1062 if (TREE_CODE (tmp) != SSA_NAME
1063 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1064 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1065 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1066 return;
1067
1068 assign = SSA_NAME_DEF_STMT (tmp);
1069 assign_bb = gimple_bb (assign);
1070 if (!single_pred_p (assign_bb))
1071 return;
1072 expr = get_ancestor_addr_info (assign, &obj, &offset);
1073 if (!expr)
1074 return;
1075 parm = TREE_OPERAND (expr, 0);
1076 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1077 gcc_assert (index >= 0);
1078
1079 cond_bb = single_pred (assign_bb);
1080 cond = last_stmt (cond_bb);
1081 if (!cond
1082 || gimple_code (cond) != GIMPLE_COND
1083 || gimple_cond_code (cond) != NE_EXPR
1084 || gimple_cond_lhs (cond) != parm
1085 || !integer_zerop (gimple_cond_rhs (cond)))
1086 return;
1087
1088 phi_bb = gimple_bb (phi);
1089 for (i = 0; i < 2; i++)
1090 {
1091 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1092 if (pred != assign_bb && pred != cond_bb)
1093 return;
1094 }
1095
1096 if (!detect_type_change (obj, expr, call, jfunc, offset))
1097 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1098 parm_ref_data_pass_through_p (&parms_ainfo[index],
1099 call, parm));
1100 }
1101
1102 /* Given OP which is passed as an actual argument to a called function,
1103 determine if it is possible to construct a KNOWN_TYPE jump function for it
1104 and if so, create one and store it to JFUNC. */
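/* A typical situation (illustrative only) is passing the address of an
   automatic variable of a class type:

     A a;
     bar (&a);

   in which case the jump function can record that the argument points to an
   object whose dynamic type is A, unless a type change is detected.  */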
1105
1106 static void
1107 compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1108 gimple call)
1109 {
1110 HOST_WIDE_INT offset, size, max_size;
1111 tree base;
1112
1113 if (!flag_devirtualize
1114 || TREE_CODE (op) != ADDR_EXPR
1115 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
1116 return;
1117
1118 op = TREE_OPERAND (op, 0);
1119 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1120 if (!DECL_P (base)
1121 || max_size == -1
1122 || max_size != size
1123 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1124 || is_global_var (base))
1125 return;
1126
1127 if (!TYPE_BINFO (TREE_TYPE (base))
1128 || detect_type_change (op, base, call, jfunc, offset))
1129 return;
1130
1131 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
1132 }
1133
1134 /* Inspect the given TYPE and return true iff it has the same structure (the
1135 same number of fields of the same types) as a C++ member pointer. If
1136 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1137 corresponding fields there. */
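/* A sketch of the layout this function looks for, assuming the usual
   Itanium-C++-ABI-style member pointer representation (the field names are
   illustrative only):

     struct
     {
       void (T::* __pfn) ();   // method pointer or vtable index
       ptrdiff_t __delta;      // adjustment of the this pointer
     };
*/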
1138
1139 static bool
1140 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1141 {
1142 tree fld;
1143
1144 if (TREE_CODE (type) != RECORD_TYPE)
1145 return false;
1146
1147 fld = TYPE_FIELDS (type);
1148 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1149 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1150 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1151 return false;
1152
1153 if (method_ptr)
1154 *method_ptr = fld;
1155
1156 fld = DECL_CHAIN (fld);
1157 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1158 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
1159 return false;
1160 if (delta)
1161 *delta = fld;
1162
1163 if (DECL_CHAIN (fld))
1164 return false;
1165
1166 return true;
1167 }
1168
1169 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1170 return the rhs of its defining statement. Otherwise return RHS as it
1171 is. */
1172
1173 static inline tree
1174 get_ssa_def_if_simple_copy (tree rhs)
1175 {
1176 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1177 {
1178 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1179
1180 if (gimple_assign_single_p (def_stmt))
1181 rhs = gimple_assign_rhs1 (def_stmt);
1182 else
1183 break;
1184 }
1185 return rhs;
1186 }
1187
1188 /* Simple linked list, describing known contents of an aggregate before a
1189 call. */
1190
1191 struct ipa_known_agg_contents_list
1192 {
1193 /* Offset and size of the described part of the aggregate. */
1194 HOST_WIDE_INT offset, size;
1195 /* Known constant value or NULL if the contents are known to be unknown. */
1196 tree constant;
1197 /* Pointer to the next structure in the list. */
1198 struct ipa_known_agg_contents_list *next;
1199 };
1200
1201 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1202 in ARG is filled in with constant values. ARG can either be an aggregate
1203 expression or a pointer to an aggregate. JFUNC is the jump function into
1204 which the constants are subsequently stored. */
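/* For example (an illustrative fragment, names made up), for the sequence

     s.i = 1;
     s.p = foo;
     bar (&s);

   the jump function built for the argument records the constants 1 and foo
   at the offsets of the fields i and p, provided no statement in between may
   clobber s.  */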
1205
1206 static void
1207 determine_known_aggregate_parts (gimple call, tree arg,
1208 struct ipa_jump_func *jfunc)
1209 {
1210 struct ipa_known_agg_contents_list *list = NULL;
1211 int item_count = 0, const_count = 0;
1212 HOST_WIDE_INT arg_offset, arg_size;
1213 gimple_stmt_iterator gsi;
1214 tree arg_base;
1215 bool check_ref, by_ref;
1216 ao_ref r;
1217
1218 /* The function operates in three stages. First, we prepare check_ref, r,
1219 arg_base and arg_offset based on what is actually passed as an actual
1220 argument. */
1221
1222 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1223 {
1224 by_ref = true;
1225 if (TREE_CODE (arg) == SSA_NAME)
1226 {
1227 tree type_size;
1228 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1229 return;
1230 check_ref = true;
1231 arg_base = arg;
1232 arg_offset = 0;
1233 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1234 arg_size = tree_low_cst (type_size, 1);
1235 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1236 }
1237 else if (TREE_CODE (arg) == ADDR_EXPR)
1238 {
1239 HOST_WIDE_INT arg_max_size;
1240
1241 arg = TREE_OPERAND (arg, 0);
1242 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1243 &arg_max_size);
1244 if (arg_max_size == -1
1245 || arg_max_size != arg_size
1246 || arg_offset < 0)
1247 return;
1248 if (DECL_P (arg_base))
1249 {
1250 tree size;
1251 check_ref = false;
1252 size = build_int_cst (integer_type_node, arg_size);
1253 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1254 }
1255 else
1256 return;
1257 }
1258 else
1259 return;
1260 }
1261 else
1262 {
1263 HOST_WIDE_INT arg_max_size;
1264
1265 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1266
1267 by_ref = false;
1268 check_ref = false;
1269 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1270 &arg_max_size);
1271 if (arg_max_size == -1
1272 || arg_max_size != arg_size
1273 || arg_offset < 0)
1274 return;
1275
1276 ao_ref_init (&r, arg);
1277 }
1278
1279 /* Second stage walks back the BB, looks at individual statements and as long
1280 as it is confident of how the statements affect contents of the
1281 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1282 describing it. */
1283 gsi = gsi_for_stmt (call);
1284 gsi_prev (&gsi);
1285 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1286 {
1287 struct ipa_known_agg_contents_list *n, **p;
1288 gimple stmt = gsi_stmt (gsi);
1289 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1290 tree lhs, rhs, lhs_base;
1291 bool partial_overlap;
1292
1293 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1294 continue;
1295 if (!gimple_assign_single_p (stmt))
1296 break;
1297
1298 lhs = gimple_assign_lhs (stmt);
1299 rhs = gimple_assign_rhs1 (stmt);
1300 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
1301 break;
1302
1303 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1304 &lhs_max_size);
1305 if (lhs_max_size == -1
1306 || lhs_max_size != lhs_size
1307 || (lhs_offset < arg_offset
1308 && lhs_offset + lhs_size > arg_offset)
1309 || (lhs_offset < arg_offset + arg_size
1310 && lhs_offset + lhs_size > arg_offset + arg_size))
1311 break;
1312
1313 if (check_ref)
1314 {
1315 if (TREE_CODE (lhs_base) != MEM_REF
1316 || TREE_OPERAND (lhs_base, 0) != arg_base
1317 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1318 break;
1319 }
1320 else if (lhs_base != arg_base)
1321 {
1322 if (DECL_P (lhs_base))
1323 continue;
1324 else
1325 break;
1326 }
1327
1328 if (lhs_offset + lhs_size < arg_offset
1329 || lhs_offset >= (arg_offset + arg_size))
1330 continue;
1331
1332 partial_overlap = false;
1333 p = &list;
1334 while (*p && (*p)->offset < lhs_offset)
1335 {
1336 if ((*p)->offset + (*p)->size > lhs_offset)
1337 {
1338 partial_overlap = true;
1339 break;
1340 }
1341 p = &(*p)->next;
1342 }
1343 if (partial_overlap)
1344 break;
1345 if (*p && (*p)->offset < lhs_offset + lhs_size)
1346 {
1347 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1348 /* We already know this value is subsequently overwritten with
1349 something else. */
1350 continue;
1351 else
1352 /* Otherwise this is a partial overlap which we cannot
1353 represent. */
1354 break;
1355 }
1356
1357 rhs = get_ssa_def_if_simple_copy (rhs);
1358 n = XALLOCA (struct ipa_known_agg_contents_list);
1359 n->size = lhs_size;
1360 n->offset = lhs_offset;
1361 if (is_gimple_ip_invariant (rhs))
1362 {
1363 n->constant = rhs;
1364 const_count++;
1365 }
1366 else
1367 n->constant = NULL_TREE;
1368 n->next = *p;
1369 *p = n;
1370
1371 item_count++;
1372 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1373 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1374 break;
1375 }
1376
1377 /* Third stage just goes over the list and creates an appropriate vector of
1378 ipa_agg_jf_item structures out of it, of course only if there are
1379 any known constants to begin with. */
1380
1381 if (const_count)
1382 {
1383 jfunc->agg.by_ref = by_ref;
1384 jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
1385 while (list)
1386 {
1387 if (list->constant)
1388 {
1389 struct ipa_agg_jf_item item;
1390 item.offset = list->offset - arg_offset;
1391 item.value = prune_expression_for_jf (list->constant);
1392 VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
1393 }
1394 list = list->next;
1395 }
1396 }
1397 }
1398
1399 /* Compute jump function for all arguments of callsite CS and insert the
1400 information in the jump_functions array in the ipa_edge_args corresponding
1401 to this callsite. */
1402
1403 static void
1404 ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
1405 struct cgraph_edge *cs)
1406 {
1407 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1408 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1409 gimple call = cs->call_stmt;
1410 int n, arg_num = gimple_call_num_args (call);
1411
1412 if (arg_num == 0 || args->jump_functions)
1413 return;
1414 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
1415
1416 for (n = 0; n < arg_num; n++)
1417 {
1418 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1419 tree arg = gimple_call_arg (call, n);
1420
1421 if (is_gimple_ip_invariant (arg))
1422 ipa_set_jf_constant (jfunc, arg);
1423 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1424 && TREE_CODE (arg) == PARM_DECL)
1425 {
1426 int index = ipa_get_param_decl_index (info, arg);
1427
1428 gcc_assert (index >= 0);
1429 /* Aggregate passed by value, check for pass-through, otherwise we
1430 will attempt to fill in aggregate contents later in this
1431 for cycle. */
1432 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1433 {
1434 ipa_set_jf_simple_pass_through (jfunc, index, false);
1435 continue;
1436 }
1437 }
1438 else if (TREE_CODE (arg) == SSA_NAME)
1439 {
1440 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1441 {
1442 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1443 if (index >= 0
1444 && !detect_type_change_ssa (arg, call, jfunc))
1445 {
1446 bool agg_p;
1447 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1448 call, arg);
1449 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1450 }
1451 }
1452 else
1453 {
1454 gimple stmt = SSA_NAME_DEF_STMT (arg);
1455 if (is_gimple_assign (stmt))
1456 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1457 call, stmt, arg);
1458 else if (gimple_code (stmt) == GIMPLE_PHI)
1459 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1460 call, stmt);
1461 }
1462 }
1463 else
1464 compute_known_type_jump_func (arg, jfunc, call);
1465
1466 if ((jfunc->type != IPA_JF_PASS_THROUGH
1467 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1468 && (jfunc->type != IPA_JF_ANCESTOR
1469 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1470 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1471 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1472 determine_known_aggregate_parts (call, arg, jfunc);
1473 }
1474 }
1475
1476 /* Compute jump functions for all edges - both direct and indirect - outgoing
1477 from NODE. Also count the actual arguments in the process. */
1478
1479 static void
1480 ipa_compute_jump_functions (struct cgraph_node *node,
1481 struct param_analysis_info *parms_ainfo)
1482 {
1483 struct cgraph_edge *cs;
1484
1485 for (cs = node->callees; cs; cs = cs->next_callee)
1486 {
1487 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1488 NULL);
1489 /* We do not need to bother analyzing calls to unknown
1490 functions unless they may become known during lto/whopr. */
1491 if (!callee->analyzed && !flag_lto)
1492 continue;
1493 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1494 }
1495
1496 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
1497 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
1498 }
1499
1500 /* If STMT looks like a statement loading a value from a member pointer formal
1501 parameter, return that parameter and store the offset of the field to
1502 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1503 might be clobbered). If USE_DELTA, then we look for a use of the delta
1504 field rather than the pfn. */
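/* Both of the following illustrative forms are recognized when f is a
   member pointer PARM_DECL:

     f$__pfn_24 = f.__pfn;
     f$__pfn_24 = MEM[(struct *)&f + 4B];

   the former through the COMPONENT_REF field check, the latter through the
   comparison of the MEM_REF offset with the byte position of the field.  */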
1505
1506 static tree
1507 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1508 HOST_WIDE_INT *offset_p)
1509 {
1510 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1511
1512 if (!gimple_assign_single_p (stmt))
1513 return NULL_TREE;
1514
1515 rhs = gimple_assign_rhs1 (stmt);
1516 if (TREE_CODE (rhs) == COMPONENT_REF)
1517 {
1518 ref_field = TREE_OPERAND (rhs, 1);
1519 rhs = TREE_OPERAND (rhs, 0);
1520 }
1521 else
1522 ref_field = NULL_TREE;
1523 if (TREE_CODE (rhs) != MEM_REF)
1524 return NULL_TREE;
1525 rec = TREE_OPERAND (rhs, 0);
1526 if (TREE_CODE (rec) != ADDR_EXPR)
1527 return NULL_TREE;
1528 rec = TREE_OPERAND (rec, 0);
1529 if (TREE_CODE (rec) != PARM_DECL
1530 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1531 return NULL_TREE;
1532 ref_offset = TREE_OPERAND (rhs, 1);
1533
1534 if (use_delta)
1535 fld = delta_field;
1536 else
1537 fld = ptr_field;
1538 if (offset_p)
1539 *offset_p = int_bit_position (fld);
1540
1541 if (ref_field)
1542 {
1543 if (integer_nonzerop (ref_offset))
1544 return NULL_TREE;
1545 return ref_field == fld ? rec : NULL_TREE;
1546 }
1547 else
1548 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1549 : NULL_TREE;
1550 }
1551
1552 /* Returns true iff T is an SSA_NAME defined by a statement. */
1553
1554 static bool
1555 ipa_is_ssa_with_stmt_def (tree t)
1556 {
1557 if (TREE_CODE (t) == SSA_NAME
1558 && !SSA_NAME_IS_DEFAULT_DEF (t))
1559 return true;
1560 else
1561 return false;
1562 }
1563
1564 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1565 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1566 indirect call graph edge. */
1567
1568 static struct cgraph_edge *
1569 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1570 {
1571 struct cgraph_edge *cs;
1572
1573 cs = cgraph_edge (node, stmt);
1574 cs->indirect_info->param_index = param_index;
1575 cs->indirect_info->offset = 0;
1576 cs->indirect_info->polymorphic = 0;
1577 cs->indirect_info->agg_contents = 0;
1578 return cs;
1579 }
1580
1581 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1582 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1583 intermediate information about each formal parameter. Currently it checks
1584 whether the call calls a pointer that is a formal parameter and if so, the
1585 parameter is marked with the called flag and an indirect call graph edge
1586 describing the call is created. This is very simple for ordinary pointers
1587 represented in SSA but not-so-nice when it comes to member pointers. The
1588 ugly part of this function does nothing more than trying to match the
1589 pattern of such a call. An example of such a pattern is the gimple dump
1590 below, the call is on the last line:
1591
1592 <bb 2>:
1593 f$__delta_5 = f.__delta;
1594 f$__pfn_24 = f.__pfn;
1595
1596 or
1597 <bb 2>:
1598 f$__delta_5 = MEM[(struct *)&f];
1599 f$__pfn_24 = MEM[(struct *)&f + 4B];
1600
1601 and a few lines below:
1602
1603 <bb 5>
1604 D.2496_3 = (int) f$__pfn_24;
1605 D.2497_4 = D.2496_3 & 1;
1606 if (D.2497_4 != 0)
1607 goto <bb 3>;
1608 else
1609 goto <bb 4>;
1610
1611 <bb 6>:
1612 D.2500_7 = (unsigned int) f$__delta_5;
1613 D.2501_8 = &S + D.2500_7;
1614 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1615 D.2503_10 = *D.2502_9;
1616 D.2504_12 = f$__pfn_24 + -1;
1617 D.2505_13 = (unsigned int) D.2504_12;
1618 D.2506_14 = D.2503_10 + D.2505_13;
1619 D.2507_15 = *D.2506_14;
1620 iftmp.11_16 = (String:: *) D.2507_15;
1621
1622 <bb 7>:
1623 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1624 D.2500_19 = (unsigned int) f$__delta_5;
1625 D.2508_20 = &S + D.2500_19;
1626 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1627
1628 Such patterns are results of simple calls to a member pointer:
1629
1630 int doprinting (int (MyString::* f)(int) const)
1631 {
1632 MyString S ("somestring");
1633
1634 return (S.*f)(4);
1635 }
1636
1637 Moreover, the function also looks for called pointers loaded from aggregates
1638 passed by value or reference. */
1639
1640 static void
1641 ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1642 struct ipa_node_params *info,
1643 struct param_analysis_info *parms_ainfo,
1644 gimple call, tree target)
1645 {
1646 gimple def;
1647 tree n1, n2;
1648 gimple d1, d2;
1649 tree rec, rec2, cond;
1650 gimple branch;
1651 int index;
1652 basic_block bb, virt_bb, join;
1653 HOST_WIDE_INT offset;
1654 bool by_ref;
1655
1656 if (SSA_NAME_IS_DEFAULT_DEF (target))
1657 {
1658 tree var = SSA_NAME_VAR (target);
1659 index = ipa_get_param_decl_index (info, var);
1660 if (index >= 0)
1661 ipa_note_param_call (node, index, call);
1662 return;
1663 }
1664
1665 def = SSA_NAME_DEF_STMT (target);
1666 if (gimple_assign_single_p (def)
1667 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
1668 gimple_assign_rhs1 (def), &index, &offset,
1669 &by_ref))
1670 {
1671 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1672 cs->indirect_info->offset = offset;
1673 cs->indirect_info->agg_contents = 1;
1674 cs->indirect_info->by_ref = by_ref;
1675 return;
1676 }
1677
1678 /* Now we need to try to match the complex pattern of calling a member
1679 pointer. */
1680 if (gimple_code (def) != GIMPLE_PHI
1681 || gimple_phi_num_args (def) != 2
1682 || !POINTER_TYPE_P (TREE_TYPE (target))
1683 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1684 return;
1685
1686 /* First, we need to check whether one of these is a load from a member
1687 pointer that is a parameter to this function. */
1688 n1 = PHI_ARG_DEF (def, 0);
1689 n2 = PHI_ARG_DEF (def, 1);
1690 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1691 return;
1692 d1 = SSA_NAME_DEF_STMT (n1);
1693 d2 = SSA_NAME_DEF_STMT (n2);
1694
1695 join = gimple_bb (def);
1696 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1697 {
1698 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1699 return;
1700
1701 bb = EDGE_PRED (join, 0)->src;
1702 virt_bb = gimple_bb (d2);
1703 }
1704 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1705 {
1706 bb = EDGE_PRED (join, 1)->src;
1707 virt_bb = gimple_bb (d1);
1708 }
1709 else
1710 return;
1711
1712 /* Second, we need to check that the basic blocks are laid out in the way
1713 corresponding to the pattern. */
1714
1715 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1716 || single_pred (virt_bb) != bb
1717 || single_succ (virt_bb) != join)
1718 return;
1719
1720 /* Third, let's see that the branching is done depending on the least
1721 significant bit of the pfn. */
1722
1723 branch = last_stmt (bb);
1724 if (!branch || gimple_code (branch) != GIMPLE_COND)
1725 return;
1726
1727 if ((gimple_cond_code (branch) != NE_EXPR
1728 && gimple_cond_code (branch) != EQ_EXPR)
1729 || !integer_zerop (gimple_cond_rhs (branch)))
1730 return;
1731
1732 cond = gimple_cond_lhs (branch);
1733 if (!ipa_is_ssa_with_stmt_def (cond))
1734 return;
1735
1736 def = SSA_NAME_DEF_STMT (cond);
1737 if (!is_gimple_assign (def)
1738 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1739 || !integer_onep (gimple_assign_rhs2 (def)))
1740 return;
1741
1742 cond = gimple_assign_rhs1 (def);
1743 if (!ipa_is_ssa_with_stmt_def (cond))
1744 return;
1745
1746 def = SSA_NAME_DEF_STMT (cond);
1747
1748 if (is_gimple_assign (def)
1749 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1750 {
1751 cond = gimple_assign_rhs1 (def);
1752 if (!ipa_is_ssa_with_stmt_def (cond))
1753 return;
1754 def = SSA_NAME_DEF_STMT (cond);
1755 }
1756
1757 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1758 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1759 == ptrmemfunc_vbit_in_delta),
1760 NULL);
1761 if (rec != rec2)
1762 return;
1763
1764 index = ipa_get_param_decl_index (info, rec);
1765 if (index >= 0
1766 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1767 {
1768 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1769 cs->indirect_info->offset = offset;
1770 cs->indirect_info->agg_contents = 1;
1771 }
1772
1773 return;
1774 }
1775
1776 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1777 object referenced in the expression is a formal parameter of the caller
1778 (described by INFO), create a call note for the statement. */
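/* Schematically (an illustrative dump), a virtual call such as

     OBJ_TYPE_REF(iftmp_3;this_1(D)->0) (this_1(D));

   made through the parameter "this" results in an indirect call graph edge
   that is marked polymorphic and carries the OTR token and type.  */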
1779
1780 static void
1781 ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1782 struct ipa_node_params *info, gimple call,
1783 tree target)
1784 {
1785 struct cgraph_edge *cs;
1786 struct cgraph_indirect_call_info *ii;
1787 struct ipa_jump_func jfunc;
1788 tree obj = OBJ_TYPE_REF_OBJECT (target);
1789 int index;
1790 HOST_WIDE_INT anc_offset;
1791
1792 if (!flag_devirtualize)
1793 return;
1794
1795 if (TREE_CODE (obj) != SSA_NAME)
1796 return;
1797
1798 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1799 {
1800 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1801 return;
1802
1803 anc_offset = 0;
1804 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1805 gcc_assert (index >= 0);
1806 if (detect_type_change_ssa (obj, call, &jfunc))
1807 return;
1808 }
1809 else
1810 {
1811 gimple stmt = SSA_NAME_DEF_STMT (obj);
1812 tree expr;
1813
1814 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1815 if (!expr)
1816 return;
1817 index = ipa_get_param_decl_index (info,
1818 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1819 gcc_assert (index >= 0);
1820 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1821 return;
1822 }
1823
1824 cs = ipa_note_param_call (node, index, call);
1825 ii = cs->indirect_info;
1826 ii->offset = anc_offset;
1827 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1828 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1829 ii->polymorphic = 1;
1830 }
1831
1832 /* Analyze whether and how the call statement CALL utilizes formal parameters
1833 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1834 containing intermediate information about each formal parameter. */
1835
1836 static void
1837 ipa_analyze_call_uses (struct cgraph_node *node,
1838 struct ipa_node_params *info,
1839 struct param_analysis_info *parms_ainfo, gimple call)
1840 {
1841 tree target = gimple_call_fn (call);
1842
1843 if (!target)
1844 return;
1845 if (TREE_CODE (target) == SSA_NAME)
1846 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1847 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1848 ipa_analyze_virtual_call_uses (node, info, call, target);
1849 }
1850
1851
1852 /* Analyze the call statement STMT with respect to formal parameters (described
1853 in INFO) of caller given by NODE. Currently it only checks whether formal
1854 parameters are called. PARMS_AINFO is a pointer to a vector containing
1855 intermediate information about each formal parameter. */
1856
1857 static void
1858 ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
1859 struct param_analysis_info *parms_ainfo, gimple stmt)
1860 {
1861 if (is_gimple_call (stmt))
1862 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
1863 }
1864
1865 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
1866 address-taking operands alike. If OP is a parameter declaration, mark it
1867 as used in the info structure passed in DATA. */
1868
1869 static bool
1870 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1871 tree op, void *data)
1872 {
1873 struct ipa_node_params *info = (struct ipa_node_params *) data;
1874
1875 op = get_base_address (op);
1876 if (op
1877 && TREE_CODE (op) == PARM_DECL)
1878 {
1879 int index = ipa_get_param_decl_index (info, op);
1880 gcc_assert (index >= 0);
1881 ipa_set_param_used (info, index, true);
1882 }
1883
1884 return false;
1885 }
1886
1887 /* Scan the function body of NODE and inspect the uses of formal parameters.
1888 Store the findings in various structures of the associated ipa_node_params
1889 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1890 vector containing intermediate information about each formal parameter. */
1891
1892 static void
1893 ipa_analyze_params_uses (struct cgraph_node *node,
1894 struct param_analysis_info *parms_ainfo)
1895 {
1896 tree decl = node->symbol.decl;
1897 basic_block bb;
1898 struct function *func;
1899 gimple_stmt_iterator gsi;
1900 struct ipa_node_params *info = IPA_NODE_REF (node);
1901 int i;
1902
1903 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
1904 return;
1905
1906 for (i = 0; i < ipa_get_param_count (info); i++)
1907 {
1908 tree parm = ipa_get_param (info, i);
1909 tree ddef;
1910 /* For SSA regs see if parameter is used. For non-SSA we compute
1911 the flag during modification analysis. */
1912 if (is_gimple_reg (parm)
1913 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1914 parm)) != NULL_TREE
1915 && !has_zero_uses (ddef))
1916 ipa_set_param_used (info, i, true);
1917 }
1918
1919 func = DECL_STRUCT_FUNCTION (decl);
1920 FOR_EACH_BB_FN (bb, func)
1921 {
1922 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1923 {
1924 gimple stmt = gsi_stmt (gsi);
1925
1926 if (is_gimple_debug (stmt))
1927 continue;
1928
1929 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
1930 walk_stmt_load_store_addr_ops (stmt, info,
1931 visit_ref_for_mod_analysis,
1932 visit_ref_for_mod_analysis,
1933 visit_ref_for_mod_analysis);
1934 }
1935 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1936 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1937 visit_ref_for_mod_analysis,
1938 visit_ref_for_mod_analysis,
1939 visit_ref_for_mod_analysis);
1940 }
1941
1942 info->uses_analysis_done = 1;
1943 }
1944
1945 /* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
1946
1947 static void
1948 free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
1949 {
1950 int i;
1951
1952 for (i = 0; i < param_count; i++)
1953 {
1954 if (parms_ainfo[i].parm_visited_statements)
1955 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1956 if (parms_ainfo[i].pt_visited_statements)
1957 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1958 }
1959 }
1960
1961 /* Initialize the array describing properties of formal parameters
1962 of NODE, analyze their uses and compute jump functions associated
1963 with actual arguments of calls from within NODE. */
1964
1965 void
1966 ipa_analyze_node (struct cgraph_node *node)
1967 {
1968 struct ipa_node_params *info;
1969 struct param_analysis_info *parms_ainfo;
1970 int param_count;
1971
1972 ipa_check_create_node_params ();
1973 ipa_check_create_edge_args ();
1974 info = IPA_NODE_REF (node);
1975 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
1976 ipa_initialize_node_params (node);
1977
1978 param_count = ipa_get_param_count (info);
1979 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1980 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
1981
1982 ipa_analyze_params_uses (node, parms_ainfo);
1983 ipa_compute_jump_functions (node, parms_ainfo);
1984
1985 free_parms_ainfo (parms_ainfo, param_count);
1986 pop_cfun ();
1987 }
1988
1989
1990 /* Update the jump function DST when the call graph edge corresponding to SRC
1991 is being inlined, knowing that DST is of type ancestor and SRC of known
1992 type. */
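/* For example (an illustration, not taken from any particular testcase):
combining a known-type jump function recording type T at offset 16 with an
ancestor jump function that adds offset 32 yields a known-type jump function
at offset 48 whose component type is the ancestor's type. */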
1993
1994 static void
1995 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
1996 struct ipa_jump_func *dst)
1997 {
1998 HOST_WIDE_INT combined_offset;
1999 tree combined_type;
2000
2001 combined_offset = ipa_get_jf_known_type_offset (src)
2002 + ipa_get_jf_ancestor_offset (dst);
2003 combined_type = ipa_get_jf_ancestor_type (dst);
2004
2005 ipa_set_jf_known_type (dst, combined_offset,
2006 ipa_get_jf_known_type_base_type (src),
2007 combined_type);
2008 }
2009
2010 /* Update the jump functions associated with call graph edge E when the call
2011 graph edge CS is being inlined, assuming that E->caller is already (possibly
2012 indirectly) inlined into CS->callee and that E has not been inlined. */
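/* In effect each jump function of E gets composed with the corresponding jump
function of CS: an ancestor applied to a known type yields a known type, an
ancestor applied to a simple pass-through stays an ancestor, two ancestors
combine by adding their offsets, and a plain pass-through simply takes over
the jump function of the corresponding argument; anything else degrades to
IPA_JF_UNKNOWN. */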
2013
2014 static void
2015 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2016 struct cgraph_edge *e)
2017 {
2018 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2019 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2020 int count = ipa_get_cs_argument_count (args);
2021 int i;
2022
2023 for (i = 0; i < count; i++)
2024 {
2025 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2026
2027 if (dst->type == IPA_JF_ANCESTOR)
2028 {
2029 struct ipa_jump_func *src;
2030 int dst_fid = dst->value.ancestor.formal_id;
2031
2032 /* A variable number of arguments can cause havoc if we try to access
2033 an argument that does not exist in the inlined edge. So make sure we
2034 don't. */
2035 if (dst_fid >= ipa_get_cs_argument_count (top))
2036 {
2037 dst->type = IPA_JF_UNKNOWN;
2038 continue;
2039 }
2040
2041 src = ipa_get_ith_jump_func (top, dst_fid);
2042
2043 if (src->agg.items
2044 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2045 {
2046 struct ipa_agg_jf_item *item;
2047 int j;
2048
2049 /* Currently we do not produce clobber aggregate jump functions;
2050 replace this with merging when we do. */
2051 gcc_assert (!dst->agg.items);
2052
2053 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
2054 dst->agg.by_ref = src->agg.by_ref;
2055 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
2056 item->offset -= dst->value.ancestor.offset;
2057 }
2058
2059 if (src->type == IPA_JF_KNOWN_TYPE)
2060 combine_known_type_and_ancestor_jfs (src, dst);
2061 else if (src->type == IPA_JF_PASS_THROUGH
2062 && src->value.pass_through.operation == NOP_EXPR)
2063 {
2064 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2065 dst->value.ancestor.agg_preserved &=
2066 src->value.pass_through.agg_preserved;
2067 }
2068 else if (src->type == IPA_JF_ANCESTOR)
2069 {
2070 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2071 dst->value.ancestor.offset += src->value.ancestor.offset;
2072 dst->value.ancestor.agg_preserved &=
2073 src->value.ancestor.agg_preserved;
2074 }
2075 else
2076 dst->type = IPA_JF_UNKNOWN;
2077 }
2078 else if (dst->type == IPA_JF_PASS_THROUGH)
2079 {
2080 struct ipa_jump_func *src;
2081 /* We must check the range due to calls with a variable number of arguments,
2082 and we cannot combine jump functions with operations. */
2083 if (dst->value.pass_through.operation == NOP_EXPR
2084 && (dst->value.pass_through.formal_id
2085 < ipa_get_cs_argument_count (top)))
2086 {
2087 bool agg_p;
2088 int dst_fid = dst->value.pass_through.formal_id;
2089 src = ipa_get_ith_jump_func (top, dst_fid);
2090 agg_p = dst->value.pass_through.agg_preserved;
2091
2092 dst->type = src->type;
2093 dst->value = src->value;
2094
2095 if (src->agg.items
2096 && (agg_p || !src->agg.by_ref))
2097 {
2098 /* Currently we do not produce clobber aggregate jump
2099 functions; replace this with merging when we do. */
2100 gcc_assert (!dst->agg.items);
2101
2102 dst->agg.by_ref = src->agg.by_ref;
2103 dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
2104 src->agg.items);
2105 }
2106
2107 if (!agg_p)
2108 {
2109 if (dst->type == IPA_JF_PASS_THROUGH)
2110 dst->value.pass_through.agg_preserved = false;
2111 else if (dst->type == IPA_JF_ANCESTOR)
2112 dst->value.ancestor.agg_preserved = false;
2113 }
2114 }
2115 else
2116 dst->type = IPA_JF_UNKNOWN;
2117 }
2118 }
2119 }
2120
2121 /* If TARGET is an addr_expr of a function declaration, make it the destination
2122 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2123
2124 struct cgraph_edge *
2125 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2126 {
2127 struct cgraph_node *callee;
2128 struct inline_edge_summary *es = inline_edge_summary (ie);
2129
2130 if (TREE_CODE (target) == ADDR_EXPR)
2131 target = TREE_OPERAND (target, 0);
2132 if (TREE_CODE (target) != FUNCTION_DECL)
2133 return NULL;
2134 callee = cgraph_get_node (target);
2135 if (!callee)
2136 return NULL;
2137 ipa_check_create_node_params ();
2138
2139 /* We cannot make edges to inline clones. It is a bug that someone removed
2140 the cgraph node too early. */
2141 gcc_assert (!callee->global.inlined_to);
2142
2143 cgraph_make_edge_direct (ie, callee);
2144 es = inline_edge_summary (ie);
2145 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2146 - eni_size_weights.call_cost);
2147 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2148 - eni_time_weights.call_cost);
2149 if (dump_file)
2150 {
2151 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2152 "(%s/%i -> %s/%i), for stmt ",
2153 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2154 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2155 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2156 if (ie->call_stmt)
2157 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2158 else
2159 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2160 }
2161 callee = cgraph_function_or_thunk_node (callee, NULL);
2162
2163 return ie;
2164 }
2165
2166 /* Retrieve the value from aggregate jump function AGG for the given OFFSET,
2167 or return NULL if there is none. BY_REF specifies whether the value has to
2168 be passed by reference or by value. */
2169
2170 tree
2171 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2172 HOST_WIDE_INT offset, bool by_ref)
2173 {
2174 struct ipa_agg_jf_item *item;
2175 int i;
2176
2177 if (by_ref != agg->by_ref)
2178 return NULL;
2179
2180 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
2181 if (item->offset == offset)
2182 {
2183 /* Currently we do not have clobber values; return NULL for them once
2184 we do. */
2185 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2186 return item->value;
2187 }
2188 return NULL;
2189 }
2190
2191 /* Try to find a destination for indirect edge IE that corresponds to a simple
2192 call or a call of a member function pointer and where the destination is a
2193 pointer formal parameter described by jump function JFUNC. If it can be
2194 determined, return the newly direct edge, otherwise return NULL. */
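/* This handles, for instance, a call through a member function pointer passed
by value: the target is then the constant found at the recorded offset in the
aggregate jump function of the corresponding argument. */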
2195
2196 static struct cgraph_edge *
2197 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2198 struct ipa_jump_func *jfunc)
2199 {
2200 tree target;
2201
2202 if (ie->indirect_info->agg_contents)
2203 {
2204 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2205 ie->indirect_info->offset,
2206 ie->indirect_info->by_ref);
2207 if (!target)
2208 return NULL;
2209 }
2210 else
2211 {
2212 if (jfunc->type != IPA_JF_CONST)
2213 return NULL;
2214 target = ipa_get_jf_constant (jfunc);
2215 }
2216 return ipa_make_edge_direct_to_target (ie, target);
2217 }
2218
2219 /* Try to find a destination for indirect edge IE that corresponds to a
2220 virtual call based on a formal parameter which is described by jump
2221 function JFUNC and if it can be determined, make it direct and return the
2222 direct edge. Otherwise, return NULL. */
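/* Roughly speaking, when the jump function records that the argument is known
to contain an object of type T, the BINFO of OTR_TYPE located at the
accumulated offset within T is looked up and entry OTR_TOKEN of the
corresponding virtual table identifies the FUNCTION_DECL to call. */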
2223
2224 static struct cgraph_edge *
2225 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2226 struct ipa_jump_func *jfunc)
2227 {
2228 tree binfo, target;
2229
2230 if (jfunc->type != IPA_JF_KNOWN_TYPE)
2231 return NULL;
2232
2233 binfo = TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc));
2234 gcc_checking_assert (binfo);
2235 binfo = get_binfo_at_offset (binfo, ipa_get_jf_known_type_offset (jfunc)
2236 + ie->indirect_info->offset,
2237 ie->indirect_info->otr_type);
2238 if (binfo)
2239 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2240 binfo);
2241 else
2242 return NULL;
2243
2244 if (target)
2245 return ipa_make_edge_direct_to_target (ie, target);
2246 else
2247 return NULL;
2248 }
2249
2250 /* Update the param called notes associated with NODE when CS is being inlined,
2251 assuming NODE is (potentially indirectly) inlined into CS->callee.
2252 Moreover, if the callee is discovered to be constant, create a new cgraph
2253 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2254 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
2255
2256 static bool
2257 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2258 struct cgraph_node *node,
2259 VEC (cgraph_edge_p, heap) **new_edges)
2260 {
2261 struct ipa_edge_args *top;
2262 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2263 bool res = false;
2264
2265 ipa_check_create_edge_args ();
2266 top = IPA_EDGE_REF (cs);
2267
2268 for (ie = node->indirect_calls; ie; ie = next_ie)
2269 {
2270 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2271 struct ipa_jump_func *jfunc;
2272 int param_index;
2273
2274 next_ie = ie->next_callee;
2275
2276 if (ici->param_index == -1)
2277 continue;
2278
2279 /* We must check range due to calls with variable number of arguments: */
2280 if (ici->param_index >= ipa_get_cs_argument_count (top))
2281 {
2282 ici->param_index = -1;
2283 continue;
2284 }
2285
2286 param_index = ici->param_index;
2287 jfunc = ipa_get_ith_jump_func (top, param_index);
2288 if (jfunc->type == IPA_JF_PASS_THROUGH
2289 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2290 {
2291 if (ici->agg_contents
2292 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2293 ici->param_index = -1;
2294 else
2295 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2296 }
2297 else if (jfunc->type == IPA_JF_ANCESTOR)
2298 {
2299 if (ici->agg_contents
2300 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2301 ici->param_index = -1;
2302 else
2303 {
2304 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2305 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2306 }
2307 }
2308 else
2309 /* Either we can find a destination for this edge now or never. */
2310 ici->param_index = -1;
2311
2312 if (!flag_indirect_inlining)
2313 continue;
2314
2315 if (ici->polymorphic)
2316 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
2317 else
2318 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);
2319
2320 if (new_direct_edge)
2321 {
2322 new_direct_edge->indirect_inlining_edge = 1;
2323 if (new_direct_edge->call_stmt)
2324 new_direct_edge->call_stmt_cannot_inline_p
2325 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
2326 new_direct_edge->callee->symbol.decl);
2327 if (new_edges)
2328 {
2329 VEC_safe_push (cgraph_edge_p, heap, *new_edges,
2330 new_direct_edge);
2331 top = IPA_EDGE_REF (cs);
2332 res = true;
2333 }
2334 }
2335 }
2336
2337 return res;
2338 }
2339
2340 /* Recursively traverse the subtree of NODE (including NODE) made of inlined
2341 cgraph_edges when CS has been inlined and invoke
2342 update_indirect_edges_after_inlining on all nodes and
2343 update_jump_functions_after_inlining on all non-inlined edges that lead out
2344 of this subtree. Newly discovered indirect edges will be added to
2345 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2346 created. */
2347
2348 static bool
2349 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2350 struct cgraph_node *node,
2351 VEC (cgraph_edge_p, heap) **new_edges)
2352 {
2353 struct cgraph_edge *e;
2354 bool res;
2355
2356 res = update_indirect_edges_after_inlining (cs, node, new_edges);
2357
2358 for (e = node->callees; e; e = e->next_callee)
2359 if (!e->inline_failed)
2360 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
2361 else
2362 update_jump_functions_after_inlining (cs, e);
2363 for (e = node->indirect_calls; e; e = e->next_callee)
2364 update_jump_functions_after_inlining (cs, e);
2365
2366 return res;
2367 }
2368
2369 /* Update jump functions and call note functions on inlining the call site CS.
2370 CS is expected to lead to a node already cloned by
2371 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2372 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2373 created. */
2374
2375 bool
2376 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
2377 VEC (cgraph_edge_p, heap) **new_edges)
2378 {
2379 bool changed;
2380 /* Do nothing if the preparation phase has not been carried out yet
2381 (i.e. during early inlining). */
2382 if (!ipa_node_params_vector)
2383 return false;
2384 gcc_assert (ipa_edge_args_vector);
2385
2386 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2387
2388 /* We do not keep jump functions of inlined edges up to date. Better to free
2389 them so we do not access them accidentally. */
2390 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2391 return changed;
2392 }
2393
2394 /* Frees all dynamically allocated structures that the argument info points
2395 to. */
2396
2397 void
2398 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
2399 {
2400 if (args->jump_functions)
2401 ggc_free (args->jump_functions);
2402
2403 memset (args, 0, sizeof (*args));
2404 }
2405
2406 /* Free all ipa_edge_args structures. */
2407
2408 void
2409 ipa_free_all_edge_args (void)
2410 {
2411 int i;
2412 struct ipa_edge_args *args;
2413
2414 FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
2415 ipa_free_edge_args_substructures (args);
2416
2417 VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
2418 ipa_edge_args_vector = NULL;
2419 }
2420
2421 /* Frees all dynamically allocated structures that the param info points
2422 to. */
2423
2424 void
2425 ipa_free_node_params_substructures (struct ipa_node_params *info)
2426 {
2427 VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
2428 free (info->lattices);
2429 /* Lattice values and their sources are deallocated with their allocation
2430 pool. */
2431 VEC_free (tree, heap, info->known_vals);
2432 memset (info, 0, sizeof (*info));
2433 }
2434
2435 /* Free all ipa_node_params structures. */
2436
2437 void
2438 ipa_free_all_node_params (void)
2439 {
2440 int i;
2441 struct ipa_node_params *info;
2442
2443 FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
2444 ipa_free_node_params_substructures (info);
2445
2446 VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
2447 ipa_node_params_vector = NULL;
2448 }
2449
2450 /* Set the aggregate replacements of NODE to be AGGVALS. */
2451
2452 void
2453 ipa_set_node_agg_value_chain (struct cgraph_node *node,
2454 struct ipa_agg_replacement_value *aggvals)
2455 {
2456 if (VEC_length (ipa_agg_replacement_value_p, ipa_node_agg_replacements)
2457 <= (unsigned) cgraph_max_uid)
2458 VEC_safe_grow_cleared (ipa_agg_replacement_value_p, gc,
2459 ipa_node_agg_replacements, cgraph_max_uid + 1);
2460
2461 VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
2462 node->uid, aggvals);
2463 }
2464
2465 /* Hook that is called by cgraph.c when an edge is removed. */
2466
2467 static void
2468 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
2469 {
2470 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2471 if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
2472 <= (unsigned)cs->uid)
2473 return;
2474 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2475 }
2476
2477 /* Hook that is called by cgraph.c when a node is removed. */
2478
2479 static void
2480 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2481 {
2482 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
2483 if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
2484 > (unsigned)node->uid)
2485 ipa_free_node_params_substructures (IPA_NODE_REF (node));
2486 if (VEC_length (ipa_agg_replacement_value_p, ipa_node_agg_replacements)
2487 > (unsigned)node->uid)
2488 VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
2489 (unsigned)node->uid, NULL);
2490 }
2491
2492 /* Hook that is called by cgraph.c when an edge is duplicated. */
2493
2494 static void
2495 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
2496 __attribute__((unused)) void *data)
2497 {
2498 struct ipa_edge_args *old_args, *new_args;
2499 unsigned int i;
2500
2501 ipa_check_create_edge_args ();
2502
2503 old_args = IPA_EDGE_REF (src);
2504 new_args = IPA_EDGE_REF (dst);
2505
2506 new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
2507 old_args->jump_functions);
2508
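/* The copy above shares the agg.items vectors with the source edge; duplicate
them one by one below so that the two edges do not end up sharing the
growable vectors. */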
2509 for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
2510 VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
2511 = VEC_copy (ipa_agg_jf_item_t, gc,
2512 VEC_index (ipa_jump_func_t,
2513 old_args->jump_functions, i).agg.items);
2514 }
2515
2516 /* Hook that is called by cgraph.c when a node is duplicated. */
2517
2518 static void
2519 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
2520 ATTRIBUTE_UNUSED void *data)
2521 {
2522 struct ipa_node_params *old_info, *new_info;
2523 struct ipa_agg_replacement_value *old_av, *new_av;
2524
2525 ipa_check_create_node_params ();
2526 old_info = IPA_NODE_REF (src);
2527 new_info = IPA_NODE_REF (dst);
2528
2529 new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
2530 old_info->descriptors);
2531 new_info->lattices = NULL;
2532 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
2533
2534 new_info->uses_analysis_done = old_info->uses_analysis_done;
2535 new_info->node_enqueued = old_info->node_enqueued;
2536
2537 old_av = ipa_get_agg_replacements_for_node (src);
2538 if (!old_av)
2539 return;
2540
2541 new_av = NULL;
2542 while (old_av)
2543 {
2544 struct ipa_agg_replacement_value *v;
2545
2546 v = ggc_alloc_ipa_agg_replacement_value ();
2547 memcpy (v, old_av, sizeof (*v));
2548 v->next = new_av;
2549 new_av = v;
2550 old_av = old_av->next;
2551 }
2552 ipa_set_node_agg_value_chain (dst, new_av);
2553 }
2554
2555
2556 /* Analyze a function newly added to the callgraph. */
2557
2558 static void
2559 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2560 {
2561 ipa_analyze_node (node);
2562 }
2563
2564 /* Register our cgraph hooks if they are not already there. */
2565
2566 void
2567 ipa_register_cgraph_hooks (void)
2568 {
2569 if (!edge_removal_hook_holder)
2570 edge_removal_hook_holder =
2571 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2572 if (!node_removal_hook_holder)
2573 node_removal_hook_holder =
2574 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2575 if (!edge_duplication_hook_holder)
2576 edge_duplication_hook_holder =
2577 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2578 if (!node_duplication_hook_holder)
2579 node_duplication_hook_holder =
2580 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
2581 function_insertion_hook_holder =
2582 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
2583 }
2584
2585 /* Unregister our cgraph hooks. */
2586
2587 static void
2588 ipa_unregister_cgraph_hooks (void)
2589 {
2590 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2591 edge_removal_hook_holder = NULL;
2592 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2593 node_removal_hook_holder = NULL;
2594 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2595 edge_duplication_hook_holder = NULL;
2596 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2597 node_duplication_hook_holder = NULL;
2598 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2599 function_insertion_hook_holder = NULL;
2600 }
2601
2602 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2603 longer needed after ipa-cp. */
2604
2605 void
2606 ipa_free_all_structures_after_ipa_cp (void)
2607 {
2608 if (!optimize)
2609 {
2610 ipa_free_all_edge_args ();
2611 ipa_free_all_node_params ();
2612 free_alloc_pool (ipcp_sources_pool);
2613 free_alloc_pool (ipcp_values_pool);
2614 free_alloc_pool (ipcp_agg_lattice_pool);
2615 ipa_unregister_cgraph_hooks ();
2616 }
2617 }
2618
2619 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
2620 longer needed after indirect inlining. */
2621
2622 void
2623 ipa_free_all_structures_after_iinln (void)
2624 {
2625 ipa_free_all_edge_args ();
2626 ipa_free_all_node_params ();
2627 ipa_unregister_cgraph_hooks ();
2628 if (ipcp_sources_pool)
2629 free_alloc_pool (ipcp_sources_pool);
2630 if (ipcp_values_pool)
2631 free_alloc_pool (ipcp_values_pool);
2632 if (ipcp_agg_lattice_pool)
2633 free_alloc_pool (ipcp_agg_lattice_pool);
2634 }
2635
2636 /* Print the ipa_tree_map data structure of function NODE
2637 to F. */
2638
2639 void
2640 ipa_print_node_params (FILE *f, struct cgraph_node *node)
2641 {
2642 int i, count;
2643 tree temp;
2644 struct ipa_node_params *info;
2645
2646 if (!node->analyzed)
2647 return;
2648 info = IPA_NODE_REF (node);
2649 fprintf (f, " function %s parameter descriptors:\n",
2650 cgraph_node_name (node));
2651 count = ipa_get_param_count (info);
2652 for (i = 0; i < count; i++)
2653 {
2654 temp = ipa_get_param (info, i);
2655 if (TREE_CODE (temp) == PARM_DECL)
2656 fprintf (f, " param %d : %s", i,
2657 (DECL_NAME (temp)
2658 ? (*lang_hooks.decl_printable_name) (temp, 2)
2659 : "(unnamed)"));
2660 if (ipa_is_param_used (info, i))
2661 fprintf (f, " used");
2662 fprintf (f, "\n");
2663 }
2664 }
2665
2666 /* Print ipa_tree_map data structures of all functions in the
2667 callgraph to F. */
2668
2669 void
2670 ipa_print_all_params (FILE * f)
2671 {
2672 struct cgraph_node *node;
2673
2674 fprintf (f, "\nFunction parameters:\n");
2675 FOR_EACH_FUNCTION (node)
2676 ipa_print_node_params (f, node);
2677 }
2678
2679 /* Return a heap allocated vector containing formal parameters of FNDECL. */
2680
2681 VEC(tree, heap) *
2682 ipa_get_vector_of_formal_parms (tree fndecl)
2683 {
2684 VEC(tree, heap) *args;
2685 int count;
2686 tree parm;
2687
2688 count = count_formal_params (fndecl);
2689 args = VEC_alloc (tree, heap, count);
2690 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2691 VEC_quick_push (tree, args, parm);
2692
2693 return args;
2694 }
2695
2696 /* Return a heap allocated vector containing types of formal parameters of
2697 function type FNTYPE. */
2698
2699 static inline VEC(tree, heap) *
2700 get_vector_of_formal_parm_types (tree fntype)
2701 {
2702 VEC(tree, heap) *types;
2703 int count = 0;
2704 tree t;
2705
2706 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2707 count++;
2708
2709 types = VEC_alloc (tree, heap, count);
2710 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2711 VEC_quick_push (tree, types, TREE_VALUE (t));
2712
2713 return types;
2714 }
2715
2716 /* Modify the function declaration FNDECL and its type according to the plan in
2717 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
2718 to reflect the actual parameters being modified, which are determined by the
2719 base_index field. */
2720
2721 void
2722 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2723 const char *synth_parm_prefix)
2724 {
2725 VEC(tree, heap) *oparms, *otypes;
2726 tree orig_type, new_type = NULL;
2727 tree old_arg_types, t, new_arg_types = NULL;
2728 tree parm, *link = &DECL_ARGUMENTS (fndecl);
2729 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
2730 tree new_reversed = NULL;
2731 bool care_for_types, last_parm_void;
2732
2733 if (!synth_parm_prefix)
2734 synth_parm_prefix = "SYNTH";
2735
2736 oparms = ipa_get_vector_of_formal_parms (fndecl);
2737 orig_type = TREE_TYPE (fndecl);
2738 old_arg_types = TYPE_ARG_TYPES (orig_type);
2739
2740 /* The following test is an ugly hack; some functions simply don't have any
2741 arguments in their type. This is probably a bug but well... */
2742 care_for_types = (old_arg_types != NULL_TREE);
2743 if (care_for_types)
2744 {
2745 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2746 == void_type_node);
2747 otypes = get_vector_of_formal_parm_types (orig_type);
2748 if (last_parm_void)
2749 gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
2750 else
2751 gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
2752 }
2753 else
2754 {
2755 last_parm_void = false;
2756 otypes = NULL;
2757 }
2758
2759 for (i = 0; i < len; i++)
2760 {
2761 struct ipa_parm_adjustment *adj;
2762 gcc_assert (link);
2763
2764 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2765 parm = VEC_index (tree, oparms, adj->base_index);
2766 adj->base = parm;
2767
2768 if (adj->copy_param)
2769 {
2770 if (care_for_types)
2771 new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
2772 adj->base_index),
2773 new_arg_types);
2774 *link = parm;
2775 link = &DECL_CHAIN (parm);
2776 }
2777 else if (!adj->remove_param)
2778 {
2779 tree new_parm;
2780 tree ptype;
2781
2782 if (adj->by_ref)
2783 ptype = build_pointer_type (adj->type);
2784 else
2785 ptype = adj->type;
2786
2787 if (care_for_types)
2788 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2789
2790 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2791 ptype);
2792 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2793
2794 DECL_ARTIFICIAL (new_parm) = 1;
2795 DECL_ARG_TYPE (new_parm) = ptype;
2796 DECL_CONTEXT (new_parm) = fndecl;
2797 TREE_USED (new_parm) = 1;
2798 DECL_IGNORED_P (new_parm) = 1;
2799 layout_decl (new_parm, 0);
2800
2801 adj->base = parm;
2802 adj->reduction = new_parm;
2803
2804 *link = new_parm;
2805
2806 link = &DECL_CHAIN (new_parm);
2807 }
2808 }
2809
2810 *link = NULL_TREE;
2811
2812 if (care_for_types)
2813 {
2814 new_reversed = nreverse (new_arg_types);
2815 if (last_parm_void)
2816 {
2817 if (new_reversed)
2818 TREE_CHAIN (new_arg_types) = void_list_node;
2819 else
2820 new_reversed = void_list_node;
2821 }
2822 }
2823
2824 /* Use copy_node to preserve as much as possible from the original type
2825 (debug info, attribute lists etc.).
2826 The exception is METHOD_TYPEs, which must have the THIS argument;
2827 when we are asked to remove it, we need to build a new FUNCTION_TYPE
2828 instead. */
2829 if (TREE_CODE (orig_type) != METHOD_TYPE
2830 || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
2831 && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
2832 {
2833 new_type = build_distinct_type_copy (orig_type);
2834 TYPE_ARG_TYPES (new_type) = new_reversed;
2835 }
2836 else
2837 {
2838 new_type
2839 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2840 new_reversed));
2841 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2842 DECL_VINDEX (fndecl) = NULL_TREE;
2843 }
2844
2845 /* When signature changes, we need to clear builtin info. */
2846 if (DECL_BUILT_IN (fndecl))
2847 {
2848 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2849 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2850 }
2851
2852 /* This is a new type, not a copy of an old type. Need to reassociate
2853 variants. We can handle everything except the main variant lazily. */
2854 t = TYPE_MAIN_VARIANT (orig_type);
2855 if (orig_type != t)
2856 {
2857 TYPE_MAIN_VARIANT (new_type) = t;
2858 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2859 TYPE_NEXT_VARIANT (t) = new_type;
2860 }
2861 else
2862 {
2863 TYPE_MAIN_VARIANT (new_type) = new_type;
2864 TYPE_NEXT_VARIANT (new_type) = NULL;
2865 }
2866
2867 TREE_TYPE (fndecl) = new_type;
2868 DECL_VIRTUAL_P (fndecl) = 0;
2869 if (otypes)
2870 VEC_free (tree, heap, otypes);
2871 VEC_free (tree, heap, oparms);
2872 }
2873
2874 /* Modify the actual arguments of the function call STMT as indicated in
2875 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
2876 Otherwise it must contain the corresponding call graph edge. */
2877
2878 void
2879 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2880 ipa_parm_adjustment_vec adjustments)
2881 {
2882 VEC(tree, heap) *vargs;
2883 VEC(tree, gc) **debug_args = NULL;
2884 gimple new_stmt;
2885 gimple_stmt_iterator gsi;
2886 tree callee_decl;
2887 int i, len;
2888
2889 len = VEC_length (ipa_parm_adjustment_t, adjustments);
2890 vargs = VEC_alloc (tree, heap, len);
2891 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
2892
2893 gsi = gsi_for_stmt (stmt);
2894 for (i = 0; i < len; i++)
2895 {
2896 struct ipa_parm_adjustment *adj;
2897
2898 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
2899
2900 if (adj->copy_param)
2901 {
2902 tree arg = gimple_call_arg (stmt, adj->base_index);
2903
2904 VEC_quick_push (tree, vargs, arg);
2905 }
2906 else if (!adj->remove_param)
2907 {
2908 tree expr, base, off;
2909 location_t loc;
2910
2911 /* We create a new parameter out of the value of the old one; we can
2912 do the following kinds of transformations:
2913
2914 - A scalar passed by reference is converted to a scalar passed by
2915 value. (adj->by_ref is false and the type of the original
2916 actual argument is a pointer to a scalar).
2917
2918 - A part of an aggregate is passed instead of the whole aggregate.
2919 The part can be passed either by value or by reference, this is
2920 determined by value of adj->by_ref. Moreover, the code below
2921 handles both situations when the original aggregate is passed by
2922 value (its type is not a pointer) and when it is passed by
2923 reference (it is a pointer to an aggregate).
2924
2925 When the new argument is passed by reference (adj->by_ref is true)
2926 it must be a part of an aggregate and therefore we form it by
2927 simply taking the address of a reference inside the original
2928 aggregate. */
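/* As a purely illustrative example: if only the field F of
void foo (struct S *p) turns out to be used, a call foo (&s) can be
rewritten as foo (s.f), the new actual argument being the MEM_REF built
from BASE and OFF below. */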
2929
2930 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2931 base = gimple_call_arg (stmt, adj->base_index);
2932 loc = EXPR_LOCATION (base);
2933
2934 if (TREE_CODE (base) != ADDR_EXPR
2935 && POINTER_TYPE_P (TREE_TYPE (base)))
2936 off = build_int_cst (adj->alias_ptr_type,
2937 adj->offset / BITS_PER_UNIT);
2938 else
2939 {
2940 HOST_WIDE_INT base_offset;
2941 tree prev_base;
2942
2943 if (TREE_CODE (base) == ADDR_EXPR)
2944 base = TREE_OPERAND (base, 0);
2945 prev_base = base;
2946 base = get_addr_base_and_unit_offset (base, &base_offset);
2947 /* Aggregate arguments can have non-invariant addresses. */
2948 if (!base)
2949 {
2950 base = build_fold_addr_expr (prev_base);
2951 off = build_int_cst (adj->alias_ptr_type,
2952 adj->offset / BITS_PER_UNIT);
2953 }
2954 else if (TREE_CODE (base) == MEM_REF)
2955 {
2956 off = build_int_cst (adj->alias_ptr_type,
2957 base_offset
2958 + adj->offset / BITS_PER_UNIT);
2959 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
2960 off);
2961 base = TREE_OPERAND (base, 0);
2962 }
2963 else
2964 {
2965 off = build_int_cst (adj->alias_ptr_type,
2966 base_offset
2967 + adj->offset / BITS_PER_UNIT);
2968 base = build_fold_addr_expr (base);
2969 }
2970 }
2971
2972 if (!adj->by_ref)
2973 {
2974 tree type = adj->type;
2975 unsigned int align;
2976 unsigned HOST_WIDE_INT misalign;
2977
2978 get_pointer_alignment_1 (base, &align, &misalign);
2979 misalign += (tree_to_double_int (off)
2980 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
2981 * BITS_PER_UNIT);
2982 misalign = misalign & (align - 1);
2983 if (misalign != 0)
2984 align = (misalign & -misalign);
2985 if (align < TYPE_ALIGN (type))
2986 type = build_aligned_type (type, align);
2987 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
2988 }
2989 else
2990 {
2991 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
2992 expr = build_fold_addr_expr (expr);
2993 }
2994
2995 expr = force_gimple_operand_gsi (&gsi, expr,
2996 adj->by_ref
2997 || is_gimple_reg_type (adj->type),
2998 NULL, true, GSI_SAME_STMT);
2999 VEC_quick_push (tree, vargs, expr);
3000 }
3001 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3002 {
3003 unsigned int ix;
3004 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3005 gimple def_temp;
3006
3007 arg = gimple_call_arg (stmt, adj->base_index);
3008 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3009 {
3010 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3011 continue;
3012 arg = fold_convert_loc (gimple_location (stmt),
3013 TREE_TYPE (origin), arg);
3014 }
3015 if (debug_args == NULL)
3016 debug_args = decl_debug_args_insert (callee_decl);
3017 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
3018 if (ddecl == origin)
3019 {
3020 ddecl = VEC_index (tree, *debug_args, ix + 1);
3021 break;
3022 }
3023 if (ddecl == NULL)
3024 {
3025 ddecl = make_node (DEBUG_EXPR_DECL);
3026 DECL_ARTIFICIAL (ddecl) = 1;
3027 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3028 DECL_MODE (ddecl) = DECL_MODE (origin);
3029
3030 VEC_safe_push (tree, gc, *debug_args, origin);
3031 VEC_safe_push (tree, gc, *debug_args, ddecl);
3032 }
3033 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
3034 stmt);
3035 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3036 }
3037 }
3038
3039 if (dump_file && (dump_flags & TDF_DETAILS))
3040 {
3041 fprintf (dump_file, "replacing stmt:");
3042 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3043 }
3044
3045 new_stmt = gimple_build_call_vec (callee_decl, vargs);
3046 VEC_free (tree, heap, vargs);
3047 if (gimple_call_lhs (stmt))
3048 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3049
3050 gimple_set_block (new_stmt, gimple_block (stmt));
3051 if (gimple_has_location (stmt))
3052 gimple_set_location (new_stmt, gimple_location (stmt));
3053 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3054 gimple_call_copy_flags (new_stmt, stmt);
3055
3056 if (dump_file && (dump_flags & TDF_DETAILS))
3057 {
3058 fprintf (dump_file, "with stmt:");
3059 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3060 fprintf (dump_file, "\n");
3061 }
3062 gsi_replace (&gsi, new_stmt, true);
3063 if (cs)
3064 cgraph_set_call_stmt (cs, new_stmt);
3065 update_ssa (TODO_update_ssa);
3066 free_dominance_info (CDI_DOMINATORS);
3067 }
3068
3069 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3070
3071 static bool
3072 index_in_adjustments_multiple_times_p (int base_index,
3073 ipa_parm_adjustment_vec adjustments)
3074 {
3075 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3076 bool one = false;
3077
3078 for (i = 0; i < len; i++)
3079 {
3080 struct ipa_parm_adjustment *adj;
3081 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3082
3083 if (adj->base_index == base_index)
3084 {
3085 if (one)
3086 return true;
3087 else
3088 one = true;
3089 }
3090 }
3091 return false;
3092 }
3093
3094
3095 /* Return adjustments that should have the same effect on function parameters
3096 and call arguments as if they were first changed according to adjustments in
3097 INNER and then by adjustments in OUTER. */
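/* For example: if INNER turns (a, b, c) into (a, c) by removing b, and OUTER
then removes the first of the remaining parameters, the combined adjustments
turn (a, b, c) directly into (c). */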
3098
3099 ipa_parm_adjustment_vec
3100 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3101 ipa_parm_adjustment_vec outer)
3102 {
3103 int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
3104 int inlen = VEC_length (ipa_parm_adjustment_t, inner);
3105 int removals = 0;
3106 ipa_parm_adjustment_vec adjustments, tmp;
3107
3108 tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
3109 for (i = 0; i < inlen; i++)
3110 {
3111 struct ipa_parm_adjustment *n;
3112 n = &VEC_index (ipa_parm_adjustment_t, inner, i);
3113
3114 if (n->remove_param)
3115 removals++;
3116 else
3117 VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
3118 }
3119
3120 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
3121 for (i = 0; i < outlen; i++)
3122 {
3123 struct ipa_parm_adjustment r;
3124 struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
3125 outer, i);
3126 struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
3127 out->base_index);
3128
3129 memset (&r, 0, sizeof (r));
3130 gcc_assert (!in->remove_param);
3131 if (out->remove_param)
3132 {
3133 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3134 {
3135 r.remove_param = true;
3136 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3137 }
3138 continue;
3139 }
3140
3141 r.base_index = in->base_index;
3142 r.type = out->type;
3143
3144 /* FIXME: Create nonlocal value too. */
3145
3146 if (in->copy_param && out->copy_param)
3147 r.copy_param = true;
3148 else if (in->copy_param)
3149 r.offset = out->offset;
3150 else if (out->copy_param)
3151 r.offset = in->offset;
3152 else
3153 r.offset = in->offset + out->offset;
3154 VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
3155 }
3156
3157 for (i = 0; i < inlen; i++)
3158 {
3159 struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
3160 inner, i);
3161
3162 if (n->remove_param)
3163 VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
3164 }
3165
3166 VEC_free (ipa_parm_adjustment_t, heap, tmp);
3167 return adjustments;
3168 }
3169
3170 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
3171 way, assuming they are meant to be applied to FNDECL. */
3172
3173 void
3174 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3175 tree fndecl)
3176 {
3177 int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
3178 bool first = true;
3179 VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
3180
3181 fprintf (file, "IPA param adjustments: ");
3182 for (i = 0; i < len; i++)
3183 {
3184 struct ipa_parm_adjustment *adj;
3185 adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
3186
3187 if (!first)
3188 fprintf (file, " ");
3189 else
3190 first = false;
3191
3192 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
3193 print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
3194 if (adj->base)
3195 {
3196 fprintf (file, ", base: ");
3197 print_generic_expr (file, adj->base, 0);
3198 }
3199 if (adj->reduction)
3200 {
3201 fprintf (file, ", reduction: ");
3202 print_generic_expr (file, adj->reduction, 0);
3203 }
3204 if (adj->new_ssa_base)
3205 {
3206 fprintf (file, ", new_ssa_base: ");
3207 print_generic_expr (file, adj->new_ssa_base, 0);
3208 }
3209
3210 if (adj->copy_param)
3211 fprintf (file, ", copy_param");
3212 else if (adj->remove_param)
3213 fprintf (file, ", remove_param");
3214 else
3215 fprintf (file, ", offset %li", (long) adj->offset);
3216 if (adj->by_ref)
3217 fprintf (file, ", by_ref");
3218 print_node_brief (file, ", type: ", adj->type, 0);
3219 fprintf (file, "\n");
3220 }
3221 VEC_free (tree, heap, parms);
3222 }
3223
3224 /* Dump the linked list of aggregate replacement values AV to F. */
3225
3226 void
3227 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3228 {
3229 bool comma = false;
3230 fprintf (f, " Aggregate replacements:");
3231 for (; av; av = av->next)
3232 {
3233 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3234 av->index, av->offset);
3235 print_generic_expr (f, av->value, 0);
3236 comma = true;
3237 }
3238 fprintf (f, "\n");
3239 }
3240
3241 /* Stream out jump function JUMP_FUNC to OB. */
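/* Note that ipa_read_jump_function below must unpack the fields in exactly
the same order. */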
3242
3243 static void
3244 ipa_write_jump_function (struct output_block *ob,
3245 struct ipa_jump_func *jump_func)
3246 {
3247 struct ipa_agg_jf_item *item;
3248 struct bitpack_d bp;
3249 int i, count;
3250
3251 streamer_write_uhwi (ob, jump_func->type);
3252 switch (jump_func->type)
3253 {
3254 case IPA_JF_UNKNOWN:
3255 break;
3256 case IPA_JF_KNOWN_TYPE:
3257 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3258 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3259 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
3260 break;
3261 case IPA_JF_CONST:
3262 gcc_assert (
3263 EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
3264 stream_write_tree (ob, jump_func->value.constant, true);
3265 break;
3266 case IPA_JF_PASS_THROUGH:
3267 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3268 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3269 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
3270 bp = bitpack_create (ob->main_stream);
3271 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
3272 streamer_write_bitpack (&bp);
3273 break;
3274 case IPA_JF_ANCESTOR:
3275 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
3276 stream_write_tree (ob, jump_func->value.ancestor.type, true);
3277 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
3278 bp = bitpack_create (ob->main_stream);
3279 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
3280 streamer_write_bitpack (&bp);
3281 break;
3282 }
3283
3284 count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
3285 streamer_write_uhwi (ob, count);
3286 if (count)
3287 {
3288 bp = bitpack_create (ob->main_stream);
3289 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3290 streamer_write_bitpack (&bp);
3291 }
3292
3293 FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
3294 {
3295 streamer_write_uhwi (ob, item->offset);
3296 stream_write_tree (ob, item->value, true);
3297 }
3298 }
3299
3300 /* Read in jump function JUMP_FUNC from IB. */
3301
3302 static void
3303 ipa_read_jump_function (struct lto_input_block *ib,
3304 struct ipa_jump_func *jump_func,
3305 struct data_in *data_in)
3306 {
3307 struct bitpack_d bp;
3308 int i, count;
3309
3310 jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);
3311 switch (jump_func->type)
3312 {
3313 case IPA_JF_UNKNOWN:
3314 break;
3315 case IPA_JF_KNOWN_TYPE:
3316 jump_func->value.known_type.offset = streamer_read_uhwi (ib);
3317 jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
3318 jump_func->value.known_type.component_type = stream_read_tree (ib,
3319 data_in);
3320 break;
3321 case IPA_JF_CONST:
3322 jump_func->value.constant = stream_read_tree (ib, data_in);
3323 break;
3324 case IPA_JF_PASS_THROUGH:
3325 jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
3326 jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
3327 jump_func->value.pass_through.operation
3328 = (enum tree_code) streamer_read_uhwi (ib);
3329 bp = streamer_read_bitpack (ib);
3330 jump_func->value.pass_through.agg_preserved = bp_unpack_value (&bp, 1);
3331 break;
3332 case IPA_JF_ANCESTOR:
3333 jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
3334 jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
3335 jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
3336 bp = streamer_read_bitpack (ib);
3337 jump_func->value.ancestor.agg_preserved = bp_unpack_value (&bp, 1);
3338 break;
3339 }
3340
3341 count = streamer_read_uhwi (ib);
3342 jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
3343 if (count)
3344 {
3345 bp = streamer_read_bitpack (ib);
3346 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3347 }
3348 for (i = 0; i < count; i++)
3349 {
3350 struct ipa_agg_jf_item item;
3351 item.offset = streamer_read_uhwi (ib);
3352 item.value = stream_read_tree (ib, data_in);
3353 VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
3354 }
3355 }
3356
3357 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3358 relevant to indirect inlining to OB. */
3359
3360 static void
3361 ipa_write_indirect_edge_info (struct output_block *ob,
3362 struct cgraph_edge *cs)
3363 {
3364 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3365 struct bitpack_d bp;
3366
3367 streamer_write_hwi (ob, ii->param_index);
3368 streamer_write_hwi (ob, ii->offset);
3369 bp = bitpack_create (ob->main_stream);
3370 bp_pack_value (&bp, ii->polymorphic, 1);
3371 bp_pack_value (&bp, ii->agg_contents, 1);
3372 bp_pack_value (&bp, ii->by_ref, 1);
3373 streamer_write_bitpack (&bp);
3374
3375 if (ii->polymorphic)
3376 {
3377 streamer_write_hwi (ob, ii->otr_token);
3378 stream_write_tree (ob, ii->otr_type, true);
3379 }
3380 }
3381
3382 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3383 relevant to indirect inlining from IB. */
3384
3385 static void
3386 ipa_read_indirect_edge_info (struct lto_input_block *ib,
3387 struct data_in *data_in ATTRIBUTE_UNUSED,
3388 struct cgraph_edge *cs)
3389 {
3390 struct cgraph_indirect_call_info *ii = cs->indirect_info;
3391 struct bitpack_d bp;
3392
3393 ii->param_index = (int) streamer_read_hwi (ib);
3394 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
3395 bp = streamer_read_bitpack (ib);
3396 ii->polymorphic = bp_unpack_value (&bp, 1);
3397 ii->agg_contents = bp_unpack_value (&bp, 1);
3398 ii->by_ref = bp_unpack_value (&bp, 1);
3399 if (ii->polymorphic)
3400 {
3401 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
3402 ii->otr_type = stream_read_tree (ib, data_in);
3403 }
3404 }
3405
3406 /* Stream out NODE info to OB. */
3407
3408 static void
3409 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
3410 {
3411 int node_ref;
3412 lto_symtab_encoder_t encoder;
3413 struct ipa_node_params *info = IPA_NODE_REF (node);
3414 int j;
3415 struct cgraph_edge *e;
3416 struct bitpack_d bp;
3417
3418 encoder = ob->decl_state->symtab_node_encoder;
3419 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3420 streamer_write_uhwi (ob, node_ref);
3421
3422 bp = bitpack_create (ob->main_stream);
3423 gcc_assert (info->uses_analysis_done
3424 || ipa_get_param_count (info) == 0);
3425 gcc_assert (!info->node_enqueued);
3426 gcc_assert (!info->ipcp_orig_node);
3427 for (j = 0; j < ipa_get_param_count (info); j++)
3428 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
3429 streamer_write_bitpack (&bp);
3430 for (e = node->callees; e; e = e->next_callee)
3431 {
3432 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3433
3434 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3435 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3436 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3437 }
3438 for (e = node->indirect_calls; e; e = e->next_callee)
3439 {
3440 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3441
3442 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
3443 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
3444 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
3445 ipa_write_indirect_edge_info (ob, e);
3446 }
3447 }
3448
3449 /* Stream in NODE info from IB. */
3450
3451 static void
3452 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
3453 struct data_in *data_in)
3454 {
3455 struct ipa_node_params *info = IPA_NODE_REF (node);
3456 int k;
3457 struct cgraph_edge *e;
3458 struct bitpack_d bp;
3459
3460 ipa_initialize_node_params (node);
3461
3462 bp = streamer_read_bitpack (ib);
3463 if (ipa_get_param_count (info) != 0)
3464 info->uses_analysis_done = true;
3465 info->node_enqueued = false;
3466 for (k = 0; k < ipa_get_param_count (info); k++)
3467 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
3468 for (e = node->callees; e; e = e->next_callee)
3469 {
3470 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3471 int count = streamer_read_uhwi (ib);
3472
3473 if (!count)
3474 continue;
3475 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
3476
3477 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3478 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
3479 }
3480 for (e = node->indirect_calls; e; e = e->next_callee)
3481 {
3482 struct ipa_edge_args *args = IPA_EDGE_REF (e);
3483 int count = streamer_read_uhwi (ib);
3484
3485 if (count)
3486 {
3487 VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
3488 count);
3489 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
3490 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
3491 data_in);
3492 }
3493 ipa_read_indirect_edge_info (ib, data_in, e);
3494 }
3495 }
3496
3497 /* Write jump functions for the nodes in the current LTO partition. */
3498
3499 void
3500 ipa_prop_write_jump_functions (void)
3501 {
3502 struct cgraph_node *node;
3503 struct output_block *ob;
3504 unsigned int count = 0;
3505 lto_symtab_encoder_iterator lsei;
3506 lto_symtab_encoder_t encoder;
3507
3508
3509 if (!ipa_node_params_vector)
3510 return;
3511
3512 ob = create_output_block (LTO_section_jump_functions);
3513 encoder = ob->decl_state->symtab_node_encoder;
3514 ob->cgraph_node = NULL;
3515 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3516 lsei_next_function_in_partition (&lsei))
3517 {
3518 node = lsei_cgraph_node (lsei);
3519 if (cgraph_function_with_gimple_body_p (node)
3520 && IPA_NODE_REF (node) != NULL)
3521 count++;
3522 }
3523
3524 streamer_write_uhwi (ob, count);
3525
3526 /* Process all of the functions. */
3527 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3528 lsei_next_function_in_partition (&lsei))
3529 {
3530 node = lsei_cgraph_node (lsei);
3531 if (cgraph_function_with_gimple_body_p (node)
3532 && IPA_NODE_REF (node) != NULL)
3533 ipa_write_node_info (ob, node);
3534 }
3535 streamer_write_char_stream (ob->main_stream, 0);
3536 produce_asm (ob, NULL);
3537 destroy_output_block (ob);
3538 }
3539
3540 /* Read section in file FILE_DATA of length LEN with data DATA. */
3541
3542 static void
3543 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
3544 size_t len)
3545 {
3546 const struct lto_function_header *header =
3547 (const struct lto_function_header *) data;
3548 const int cfg_offset = sizeof (struct lto_function_header);
3549 const int main_offset = cfg_offset + header->cfg_size;
3550 const int string_offset = main_offset + header->main_size;
3551 struct data_in *data_in;
3552 struct lto_input_block ib_main;
3553 unsigned int i;
3554 unsigned int count;
3555
3556 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3557 header->main_size);
3558
3559 data_in =
3560 lto_data_in_create (file_data, (const char *) data + string_offset,
3561 header->string_size, NULL);
3562 count = streamer_read_uhwi (&ib_main);
3563
3564 for (i = 0; i < count; i++)
3565 {
3566 unsigned int index;
3567 struct cgraph_node *node;
3568 lto_symtab_encoder_t encoder;
3569
3570 index = streamer_read_uhwi (&ib_main);
3571 encoder = file_data->symtab_node_encoder;
3572 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3573 gcc_assert (node->analyzed);
3574 ipa_read_node_info (&ib_main, node, data_in);
3575 }
3576 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
3577 len);
3578 lto_data_in_delete (data_in);
3579 }
3580
3581 /* Read ipcp jump functions. */
3582
3583 void
3584 ipa_prop_read_jump_functions (void)
3585 {
3586 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3587 struct lto_file_decl_data *file_data;
3588 unsigned int j = 0;
3589
3590 ipa_check_create_node_params ();
3591 ipa_check_create_edge_args ();
3592 ipa_register_cgraph_hooks ();
3593
3594 while ((file_data = file_data_vec[j++]))
3595 {
3596 size_t len;
3597 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
3598
3599 if (data)
3600 ipa_prop_read_section (file_data, data, len);
3601 }
3602 }
3603
3604 /* After merging units, we can get a mismatch in argument counts.
3605 Decl merging might also have rendered parameter lists obsolete,
3606 so re-create the parameter information for every analyzed node. */
3607
3608 void
3609 ipa_update_after_lto_read (void)
3610 {
3611 struct cgraph_node *node;
3612
3613 ipa_check_create_node_params ();
3614 ipa_check_create_edge_args ();
3615
3616 FOR_EACH_DEFINED_FUNCTION (node)
3617 if (node->analyzed)
3618 ipa_initialize_node_params (node);
3619 }
3620
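/* Stream out the aggregate value replacement chain for NODE into OB. The
record consists of the encoded node reference, the number of values and one
{ offset, index, value } triple per replacement; it mirrors
read_agg_replacement_chain below. */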
3621 void
3622 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
3623 {
3624 int node_ref;
3625 unsigned int count = 0;
3626 lto_symtab_encoder_t encoder;
3627 struct ipa_agg_replacement_value *aggvals, *av;
3628
3629 aggvals = ipa_get_agg_replacements_for_node (node);
3630 encoder = ob->decl_state->symtab_node_encoder;
3631 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
3632 streamer_write_uhwi (ob, node_ref);
3633
3634 for (av = aggvals; av; av = av->next)
3635 count++;
3636 streamer_write_uhwi (ob, count);
3637
3638 for (av = aggvals; av; av = av->next)
3639 {
3640 streamer_write_uhwi (ob, av->offset);
3641 streamer_write_uhwi (ob, av->index);
3642 stream_write_tree (ob, av->value, true);
3643 }
3644 }
3645
3646 /* Stream in the aggregate value replacement chain for NODE from IB. */
3647
3648 static void
3649 read_agg_replacement_chain (struct lto_input_block *ib,
3650 struct cgraph_node *node,
3651 struct data_in *data_in)
3652 {
3653 struct ipa_agg_replacement_value *aggvals = NULL;
3654 unsigned int count, i;
3655
3656 count = streamer_read_uhwi (ib);
3657 for (i = 0; i < count; i++)
3658 {
3659 struct ipa_agg_replacement_value *av;
3660
3661 av = ggc_alloc_ipa_agg_replacement_value ();
3662 av->offset = streamer_read_uhwi (ib);
3663 av->index = streamer_read_uhwi (ib);
3664 av->value = stream_read_tree (ib, data_in);
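/* Prepend to the chain; this reverses the on-disk order, which is
harmless because all consumers walk the whole chain. */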
3665 av->next = aggvals;
3666 aggvals = av;
3667 }
3668 ipa_set_node_agg_value_chain (node, aggvals);
3669 }
3670
3671 /* Write all aggregate replacements for nodes in the current partition. */
3672
3673 void
3674 ipa_prop_write_all_agg_replacement (void)
3675 {
3676 struct cgraph_node *node;
3677 struct output_block *ob;
3678 unsigned int count = 0;
3679 lto_symtab_encoder_iterator lsei;
3680 lto_symtab_encoder_t encoder;
3681
3682 if (!ipa_node_agg_replacements)
3683 return;
3684
3685 ob = create_output_block (LTO_section_ipcp_transform);
3686 encoder = ob->decl_state->symtab_node_encoder;
3687 ob->cgraph_node = NULL;
3688 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3689 lsei_next_function_in_partition (&lsei))
3690 {
3691 node = lsei_cgraph_node (lsei);
3692 if (cgraph_function_with_gimple_body_p (node)
3693 && ipa_get_agg_replacements_for_node (node) != NULL)
3694 count++;
3695 }
3696
3697 streamer_write_uhwi (ob, count);
3698
3699 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
3700 lsei_next_function_in_partition (&lsei))
3701 {
3702 node = lsei_cgraph_node (lsei);
3703 if (cgraph_function_with_gimple_body_p (node)
3704 && ipa_get_agg_replacements_for_node (node) != NULL)
3705 write_agg_replacement_chain (ob, node);
3706 }
3707 streamer_write_char_stream (ob->main_stream, 0);
3708 produce_asm (ob, NULL);
3709 destroy_output_block (ob);
3710 }
3711
3712 /* Read an aggregate replacements section in file FILE_DATA of length LEN
3713 with data DATA. */
3714
3715 static void
3716 read_replacements_section (struct lto_file_decl_data *file_data,
3717 const char *data,
3718 size_t len)
3719 {
3720 const struct lto_function_header *header =
3721 (const struct lto_function_header *) data;
3722 const int cfg_offset = sizeof (struct lto_function_header);
3723 const int main_offset = cfg_offset + header->cfg_size;
3724 const int string_offset = main_offset + header->main_size;
3725 struct data_in *data_in;
3726 struct lto_input_block ib_main;
3727 unsigned int i;
3728 unsigned int count;
3729
3730 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
3731 header->main_size);
3732
3733 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
3734 header->string_size, NULL);
3735 count = streamer_read_uhwi (&ib_main);
3736
3737 for (i = 0; i < count; i++)
3738 {
3739 unsigned int index;
3740 struct cgraph_node *node;
3741 lto_symtab_encoder_t encoder;
3742
3743 index = streamer_read_uhwi (&ib_main);
3744 encoder = file_data->symtab_node_encoder;
3745 node = cgraph (lto_symtab_encoder_deref (encoder, index));
3746 gcc_assert (node->analyzed);
3747 read_agg_replacement_chain (&ib_main, node, data_in);
3748 }
3749 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
3750 len);
3751 lto_data_in_delete (data_in);
3752 }
3753
3754 /* Read IPA-CP aggregate replacements. */
3755
3756 void
3757 ipa_prop_read_all_agg_replacement (void)
3758 {
3759 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3760 struct lto_file_decl_data *file_data;
3761 unsigned int j = 0;
3762
3763 while ((file_data = file_data_vec[j++]))
3764 {
3765 size_t len;
3766 const char *data = lto_get_section_data (file_data,
3767 LTO_section_ipcp_transform,
3768 NULL, &len);
3769 if (data)
3770 read_replacements_section (file_data, data, len);
3771 }
3772 }
3773
3774 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
3775 NODE. */
3776
3777 static void
3778 adjust_agg_replacement_values (struct cgraph_node *node,
3779 struct ipa_agg_replacement_value *aggval)
3780 {
3781 struct ipa_agg_replacement_value *v;
3782 int i, c = 0, d = 0, *adj;
3783
3784 if (!node->clone.combined_args_to_skip)
3785 return;
3786
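/* Find the highest parameter index used by any replacement. */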
3787 for (v = aggval; v; v = v->next)
3788 {
3789 gcc_assert (v->index >= 0);
3790 if (c < v->index)
3791 c = v->index;
3792 }
3793 c++;
3794
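/* Build a map from original parameter indices to indices in the clone;
skipped parameters map to -1. */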
3795 adj = XALLOCAVEC (int, c);
3796 for (i = 0; i < c; i++)
3797 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
3798 {
3799 adj[i] = -1;
3800 d++;
3801 }
3802 else
3803 adj[i] = i - d;
3804
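/* Rewrite each value's parameter index through the map. */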
3805 for (v = aggval; v; v = v->next)
3806 v->index = adj[v->index];
3807 }
3808
3809
3810 /* Function body transformation phase. */
3811
3812 unsigned int
3813 ipcp_transform_function (struct cgraph_node *node)
3814 {
3815 VEC (ipa_param_descriptor_t, heap) *descriptors = NULL;
3816 struct param_analysis_info *parms_ainfo;
3817 struct ipa_agg_replacement_value *aggval;
3818 gimple_stmt_iterator gsi;
3819 basic_block bb;
3820 int param_count;
3821 bool cfg_changed = false, something_changed = false;
3822
3823 gcc_checking_assert (cfun);
3824 gcc_checking_assert (current_function_decl);
3825
3826 if (dump_file)
3827 fprintf (dump_file, "Modification phase of node %s/%i\n",
3828 cgraph_node_name (node), node->uid);
3829
3830 aggval = ipa_get_agg_replacements_for_node (node);
3831 if (!aggval)
3832 return 0;
3833 param_count = count_formal_params (node->symbol.decl);
3834 if (param_count == 0)
3835 return 0;
3836 adjust_agg_replacement_values (node, aggval);
3837 if (dump_file)
3838 ipa_dump_agg_replacement_values (dump_file, aggval);
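/* Build the parameter descriptors and per-parameter analysis info that
ipa_load_from_parm_agg_1 needs below. */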
3839 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
3840 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
3841 VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
3842 descriptors, param_count);
3843 ipa_populate_param_decls (node, descriptors);
3844
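/* Walk all load statements and replace those that read known constant
parts of aggregate parameters with the constants themselves. */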
3845 FOR_EACH_BB (bb)
3846 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3847 {
3848 struct ipa_agg_replacement_value *v;
3849 gimple stmt = gsi_stmt (gsi);
3850 tree rhs, val, t;
3851 HOST_WIDE_INT offset;
3852 int index;
3853 bool by_ref, vce;
3854
3855 if (!gimple_assign_load_p (stmt))
3856 continue;
3857 rhs = gimple_assign_rhs1 (stmt);
3858 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
3859 continue;
3860
3861 vce = false;
3862 t = rhs;
3863 while (handled_component_p (t))
3864 {
3865 /* V_C_E can do things like convert an array of integers to one
3866 bigger integer and similar things we do not handle below. */
3867 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
3868 {
3869 vce = true;
3870 break;
3871 }
3872 t = TREE_OPERAND (t, 0);
3873 }
3874 if (vce)
3875 continue;
3876
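/* Determine whether the load reads from an aggregate parameter, directly
or through a pointer, and at which offset. */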
3877 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3878 rhs, &index, &offset, &by_ref))
3879 continue;
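/* Look for a replacement value recorded for this parameter at this
offset. */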
3880 for (v = aggval; v; v = v->next)
3881 if (v->index == index
3882 && v->offset == offset)
3883 break;
3884 if (!v)
3885 continue;
3886
3887 gcc_checking_assert (is_gimple_ip_invariant (v->value));
3888 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
3889 {
3890 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
3891 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
3892 else if (TYPE_SIZE (TREE_TYPE (rhs))
3893 == TYPE_SIZE (TREE_TYPE (v->value)))
3894 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
3895 else
3896 {
3897 if (dump_file)
3898 {
3899 fprintf (dump_file, " const ");
3900 print_generic_expr (dump_file, v->value, 0);
3901 fprintf (dump_file, " can't be converted to type of ");
3902 print_generic_expr (dump_file, rhs, 0);
3903 fprintf (dump_file, "\n");
3904 }
3905 continue;
3906 }
3907 }
3908 else
3909 val = v->value;
3910
3911 if (dump_file && (dump_flags & TDF_DETAILS))
3912 {
3913 fprintf (dump_file, "Modifying stmt:\n ");
3914 print_gimple_stmt (dump_file, stmt, 0, 0);
3915 }
3916 gimple_assign_set_rhs_from_tree (&gsi, val);
3917 update_stmt (stmt);
3918
3919 if (dump_file && (dump_flags & TDF_DETAILS))
3920 {
3921 fprintf (dump_file, "into:\n ");
3922 print_gimple_stmt (dump_file, stmt, 0, 0);
3923 fprintf (dump_file, "\n");
3924 }
3925
3926 something_changed = true;
3927 if (maybe_clean_eh_stmt (stmt)
3928 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3929 cfg_changed = true;
3930 }
3931
3932 VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
3933 node->uid, NULL);
3934 free_parms_ainfo (parms_ainfo, param_count);
3935 VEC_free (ipa_param_descriptor_t, heap, descriptors);
3936
3937 if (!something_changed)
3938 return 0;
3939 else if (cfg_changed)
3940 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
3941 else
3942 return TODO_update_ssa_only_virtuals;
3943 }