Give struct param_aa_status and struct func_body_info an ipa_ prefix.
gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "options.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the infos about edge arguments are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions", 32);

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
         || !opt_for_fn (node->decl, flag_ipa_cp);
}

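/* For illustration (a hypothetical example, not code from this file):
   a function compiled with per-function options such as

     __attribute__ ((optimize ("O0")))
     int left_alone (int x) { return x; }

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node with optimize == 0,
   so the predicate above returns true and IPA-CP skips it.  */
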
/* Return the index of the formal parameter whose tree is PTREE among the
   parameter DESCRIPTORS, or -1 if it is not among them.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO into
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }

      struct ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, " Context: ");
          ctx->dump (dump_file);
        }

      if (jump_func->alignment.known)
        {
          fprintf (f, " Alignment: %u, misalignment: %u\n",
                   jump_func->alignment.align,
                   jump_func->alignment.misalign);
        }
      else
        fprintf (f, " Unknown alignment\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup_for_dump (node->name ()), node->order,
               xstrdup_for_dump (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function known to carry no information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then the VMT pointers of this object and all its ancestors are set to
   new values corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

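/* For illustration only, a hypothetical sketch (not code from GCC) of the
   constructor shape that the assumptions above describe:

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     B::B () : A ()     // 1) constructors of ancestor sub-objects run first
     {
       // 2) the compiled constructor stores the address of B's VMT into
       //    the vtable pointer(s) of *this at this point,
       user_code ();    // 3) and only then runs user code, which may make
                        //    virtual calls.
     }  */
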
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

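/* For illustration (hypothetical code, not from this file): in a regular
   method such as

     void A::observe () { dispatch (this); }

   the dynamic type of *this cannot change before the call, so the function
   below can return false for the THIS argument, whereas in a constructor

     A::A () { dispatch (this); }

   it can, because the constructor itself rewrites the vtable pointer.  */
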
static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we have no easy
     way to do this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (inlined_polymorphic_ctor_dtor_block_p (block, false))
              return true;
          return false;
        }
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gcall *call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If so, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

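/* The limit above is the ipa-max-aa-steps --param, so when the walks give up
   too early it can be raised from the command line, e.g. (an illustrative
   invocation):

     gcc -O2 --param ipa-max-aa-steps=50000 file.c  */
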
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
                                                              gimple_bb (call),
                                                              index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI holds the information gathered about the current function (but it can
   be NULL) and DESCRIPTORS describe its parameters; STMT is the load
   statement.  If the function returns true, *INDEX_P, *OFFSET_P and *BY_REF_P
   are filled with the parameter index, the offset within the aggregate and
   whether it is a load from a value passed by reference respectively; if
   SIZE_P is non-NULL, *SIZE_P is set to the size of the load.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
                        vec<ipa_param_descriptor> descriptors,
                        gimple stmt, tree op, int *index_p,
                        HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                        bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

           void hip7(S*) (struct S * p)
           {
             void (*<T2e4>) (struct S *) D.1867;
             struct S * p.1;

             <bb 2>:
               p.1_1 = p;
               D.1867_2 = p.1_1->f;
               D.1867_2 ();
               gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters in the different
   stages of IPA optimizations.  FBI contains the information that is only
   needed during the intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
       iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
       # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
       D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
       return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

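/* For illustration (a sketch of a typical C++ ABI lowering, not a definition
   from this file), the record matched above has the shape:

     struct
     {
       void (S::*__pfn) ();  // pointer to the method, or a vtable offset
       ptrdiff_t __delta;    // adjustment to apply to the this pointer
     };  */
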
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

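/* For example (hypothetical gimple), given

     tmp_1 = param_2(D);
     tmp_3 = tmp_1;

   get_ssa_def_if_simple_copy (tmp_3) follows both copies and returns
   param_2(D), which is a default definition and therefore stops the walk.  */
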
/* Simple linked list, describing known contents of an aggregate before
   a call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
   Return NULL if there is a partial overlap with an existing element; if such
   an element is already there, set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

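/* A worked example of the function above (hypothetical offsets and sizes, in
   bits): with a list already containing entries at (offset 0, size 32) and
   (offset 64, size 32), a new store at (32, 32) yields the place between
   them; a store at (0, 32) sets *ALREADY_THERE; and a store at (16, 32),
   which partially overlaps the first entry, yields NULL.  */
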
/* Build an aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
                                         tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

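/* For illustration (hypothetical K&R-style code): given

     int take_s ();                       // unprototyped declaration
     int take_s (p) struct S *p; { ... }  // old-style definition

   the callee type has no TYPE_ARG_TYPES list, so the function above falls
   back to DECL_ARGUMENTS of the callee decl to recover the type of P.  */
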
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          struct ipa_polymorphic_call_context context (cs->caller->decl,
                                                       arg, cs->call_stmt,
                                                       &instance);
          context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          unsigned HOST_WIDE_INT hwi_bitpos;
          unsigned align;

          if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
              && align % BITS_PER_UNIT == 0
              && hwi_bitpos % BITS_PER_UNIT == 0)
            {
              jfunc->alignment.known = true;
              jfunc->alignment.align = align / BITS_PER_UNIT;
              jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
            }
          else
            gcc_assert (!jfunc->alignment.known);
        }
      else
        gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for loop.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call,
                                                    as_a <gphi *> (stmt));
            }
        }

      /* If ARG is a pointer, we cannot use its type to determine the type of
         the aggregate being passed (because type conversions are ignored in
         gimple).  Usually we can safely rely on the type from the function
         declaration, but in the case of K&R prototypes or variadic functions
         we can try our luck with the type of the pointer passed.
         TODO: Since we look for the actual initialization of the memory
         object, we may do better to work out the type based on the memory
         stores we find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
                                   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto)
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

1756 /* If STMT looks like a statement loading a value from a member pointer formal
1757 parameter, return that parameter and store the offset of the field to
1758 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1759 might be clobbered). If USE_DELTA, then we look for a use of the delta
1760 field rather than the pfn. */
1761
1762 static tree
1763 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1764 HOST_WIDE_INT *offset_p)
1765 {
1766 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1767
1768 if (!gimple_assign_single_p (stmt))
1769 return NULL_TREE;
1770
1771 rhs = gimple_assign_rhs1 (stmt);
1772 if (TREE_CODE (rhs) == COMPONENT_REF)
1773 {
1774 ref_field = TREE_OPERAND (rhs, 1);
1775 rhs = TREE_OPERAND (rhs, 0);
1776 }
1777 else
1778 ref_field = NULL_TREE;
1779 if (TREE_CODE (rhs) != MEM_REF)
1780 return NULL_TREE;
1781 rec = TREE_OPERAND (rhs, 0);
1782 if (TREE_CODE (rec) != ADDR_EXPR)
1783 return NULL_TREE;
1784 rec = TREE_OPERAND (rec, 0);
1785 if (TREE_CODE (rec) != PARM_DECL
1786 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1787 return NULL_TREE;
1788 ref_offset = TREE_OPERAND (rhs, 1);
1789
1790 if (use_delta)
1791 fld = delta_field;
1792 else
1793 fld = ptr_field;
1794 if (offset_p)
1795 *offset_p = int_bit_position (fld);
1796
1797 if (ref_field)
1798 {
1799 if (integer_nonzerop (ref_offset))
1800 return NULL_TREE;
1801 return ref_field == fld ? rec : NULL_TREE;
1802 }
1803 else
1804 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1805 : NULL_TREE;
1806 }
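
/* As a hypothetical illustration of the matching above (assuming the
32-bit layout used in the examples further below, pfn at byte 0 and
delta at byte 4), the statement

f$__pfn_24 = MEM[(struct *)&f];

is recognized when USE_DELTA is false, returning the PARM_DECL of F
and storing the bit position of the pfn field to *OFFSET_P, whereas

f$__delta_5 = MEM[(struct *)&f + 4B];

only matches when USE_DELTA is true.  */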
1807
1808 /* Returns true iff T is an SSA_NAME defined by a statement. */
1809
1810 static bool
1811 ipa_is_ssa_with_stmt_def (tree t)
1812 {
1813 return (TREE_CODE (t) == SSA_NAME
1814 && !SSA_NAME_IS_DEFAULT_DEF (t));
1818 }
1819
1820 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1821 call to the parameter with number PARAM_INDEX. NODE is the caller. Return
1822 the indirect call graph edge. */
1823
1824 static struct cgraph_edge *
1825 ipa_note_param_call (struct cgraph_node *node, int param_index,
1826 gcall *stmt)
1827 {
1828 struct cgraph_edge *cs;
1829
1830 cs = node->get_edge (stmt);
1831 cs->indirect_info->param_index = param_index;
1832 cs->indirect_info->agg_contents = 0;
1833 cs->indirect_info->member_ptr = 0;
1834 return cs;
1835 }
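
/* For example, for a statement cb_2(D) (17); where cb is the third
formal parameter of NODE, a caller of this function would pass
PARAM_INDEX 2, and later propagation may be able to turn the resulting
annotated indirect edge into a direct call.  */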
1836
1837 /* Analyze the CALL and examine uses of formal parameters of the caller
1838 FBI->node (described by FBI->info). Currently it checks whether the
1839 call calls a pointer that is a formal parameter and if so, the
1841 parameter is marked with the called flag and an indirect call graph edge
1842 describing the call is created. This is very simple for ordinary pointers
1843 represented in SSA but not-so-nice when it comes to member pointers. The
1844 ugly part of this function does nothing more than trying to match the
1845 pattern of such a call. An example of such a pattern is the gimple dump
1846 below, the call is on the last line:
1847
1848 <bb 2>:
1849 f$__delta_5 = f.__delta;
1850 f$__pfn_24 = f.__pfn;
1851
1852 or
1853 <bb 2>:
1854 f$__delta_5 = MEM[(struct *)&f];
1855 f$__pfn_24 = MEM[(struct *)&f + 4B];
1856
1857 and a few lines below:
1858
1859 <bb 5>
1860 D.2496_3 = (int) f$__pfn_24;
1861 D.2497_4 = D.2496_3 & 1;
1862 if (D.2497_4 != 0)
1863 goto <bb 3>;
1864 else
1865 goto <bb 4>;
1866
1867 <bb 6>:
1868 D.2500_7 = (unsigned int) f$__delta_5;
1869 D.2501_8 = &S + D.2500_7;
1870 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1871 D.2503_10 = *D.2502_9;
1872 D.2504_12 = f$__pfn_24 + -1;
1873 D.2505_13 = (unsigned int) D.2504_12;
1874 D.2506_14 = D.2503_10 + D.2505_13;
1875 D.2507_15 = *D.2506_14;
1876 iftmp.11_16 = (String:: *) D.2507_15;
1877
1878 <bb 7>:
1879 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1880 D.2500_19 = (unsigned int) f$__delta_5;
1881 D.2508_20 = &S + D.2500_19;
1882 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1883
1884 Such patterns are results of simple calls to a member pointer:
1885
1886 int doprinting (int (MyString::* f)(int) const)
1887 {
1888 MyString S ("somestring");
1889
1890 return (S.*f)(4);
1891 }
1892
1893 Moreover, the function also looks for called pointers loaded from aggregates
1894 passed by value or reference. */
1895
1896 static void
1897 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1898 tree target)
1899 {
1900 struct ipa_node_params *info = fbi->info;
1901 HOST_WIDE_INT offset;
1902 bool by_ref;
1903
1904 if (SSA_NAME_IS_DEFAULT_DEF (target))
1905 {
1906 tree var = SSA_NAME_VAR (target);
1907 int index = ipa_get_param_decl_index (info, var);
1908 if (index >= 0)
1909 ipa_note_param_call (fbi->node, index, call);
1910 return;
1911 }
1912
1913 int index;
1914 gimple def = SSA_NAME_DEF_STMT (target);
1915 if (gimple_assign_single_p (def)
1916 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1917 gimple_assign_rhs1 (def), &index, &offset,
1918 NULL, &by_ref))
1919 {
1920 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1921 cs->indirect_info->offset = offset;
1922 cs->indirect_info->agg_contents = 1;
1923 cs->indirect_info->by_ref = by_ref;
1924 return;
1925 }
1926
1927 /* Now we need to try to match the complex pattern of calling a member
1928 pointer. */
1929 if (gimple_code (def) != GIMPLE_PHI
1930 || gimple_phi_num_args (def) != 2
1931 || !POINTER_TYPE_P (TREE_TYPE (target))
1932 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1933 return;
1934
1935 /* First, we need to check whether one of these is a load from a member
1936 pointer that is a parameter to this function. */
1937 tree n1 = PHI_ARG_DEF (def, 0);
1938 tree n2 = PHI_ARG_DEF (def, 1);
1939 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1940 return;
1941 gimple d1 = SSA_NAME_DEF_STMT (n1);
1942 gimple d2 = SSA_NAME_DEF_STMT (n2);
1943
1944 tree rec;
1945 basic_block bb, virt_bb;
1946 basic_block join = gimple_bb (def);
1947 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1948 {
1949 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1950 return;
1951
1952 bb = EDGE_PRED (join, 0)->src;
1953 virt_bb = gimple_bb (d2);
1954 }
1955 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1956 {
1957 bb = EDGE_PRED (join, 1)->src;
1958 virt_bb = gimple_bb (d1);
1959 }
1960 else
1961 return;
1962
1963 /* Second, we need to check that the basic blocks are laid out in the way
1964 corresponding to the pattern. */
1965
1966 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1967 || single_pred (virt_bb) != bb
1968 || single_succ (virt_bb) != join)
1969 return;
1970
1971 /* Third, let's see that the branching is done depending on the least
1972 significant bit of the pfn. */
1973
1974 gimple branch = last_stmt (bb);
1975 if (!branch || gimple_code (branch) != GIMPLE_COND)
1976 return;
1977
1978 if ((gimple_cond_code (branch) != NE_EXPR
1979 && gimple_cond_code (branch) != EQ_EXPR)
1980 || !integer_zerop (gimple_cond_rhs (branch)))
1981 return;
1982
1983 tree cond = gimple_cond_lhs (branch);
1984 if (!ipa_is_ssa_with_stmt_def (cond))
1985 return;
1986
1987 def = SSA_NAME_DEF_STMT (cond);
1988 if (!is_gimple_assign (def)
1989 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1990 || !integer_onep (gimple_assign_rhs2 (def)))
1991 return;
1992
1993 cond = gimple_assign_rhs1 (def);
1994 if (!ipa_is_ssa_with_stmt_def (cond))
1995 return;
1996
1997 def = SSA_NAME_DEF_STMT (cond);
1998
1999 if (is_gimple_assign (def)
2000 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2001 {
2002 cond = gimple_assign_rhs1 (def);
2003 if (!ipa_is_ssa_with_stmt_def (cond))
2004 return;
2005 def = SSA_NAME_DEF_STMT (cond);
2006 }
2007
2008 tree rec2;
2009 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2010 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2011 == ptrmemfunc_vbit_in_delta),
2012 NULL);
2013 if (rec != rec2)
2014 return;
2015
2016 index = ipa_get_param_decl_index (info, rec);
2017 if (index >= 0
2018 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2019 {
2020 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2021 cs->indirect_info->offset = offset;
2022 cs->indirect_info->agg_contents = 1;
2023 cs->indirect_info->member_ptr = 1;
2024 }
2025
2026 return;
2027 }
2028
2029 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2030 object referenced in the expression is a formal parameter of the caller
2031 FBI->node (described by FBI->info), create a call note for the
2032 statement. */
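
/* As a hedged illustration (the exact dump syntax varies), a C++
virtual call this->foo () reaches this function with a TARGET such as

OBJ_TYPE_REF (_6; this_1(D)->0) (this_1(D));

where OBJ_TYPE_REF_OBJECT is this_1(D), here a default definition of a
PARM_DECL, and OBJ_TYPE_REF_TOKEN selects vtable slot 0.  */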
2033
2034 static void
2035 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2036 gcall *call, tree target)
2037 {
2038 tree obj = OBJ_TYPE_REF_OBJECT (target);
2039 int index;
2040 HOST_WIDE_INT anc_offset;
2041
2042 if (!flag_devirtualize)
2043 return;
2044
2045 if (TREE_CODE (obj) != SSA_NAME)
2046 return;
2047
2048 struct ipa_node_params *info = fbi->info;
2049 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2050 {
2051 struct ipa_jump_func jfunc;
2052 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2053 return;
2054
2055 anc_offset = 0;
2056 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2057 gcc_assert (index >= 0);
2058 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2059 call, &jfunc))
2060 return;
2061 }
2062 else
2063 {
2064 struct ipa_jump_func jfunc;
2065 gimple stmt = SSA_NAME_DEF_STMT (obj);
2066 tree expr;
2067
2068 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2069 if (!expr)
2070 return;
2071 index = ipa_get_param_decl_index (info,
2072 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2073 gcc_assert (index >= 0);
2074 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2075 call, &jfunc, anc_offset))
2076 return;
2077 }
2078
2079 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2080 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2081 ii->offset = anc_offset;
2082 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2083 ii->otr_type = obj_type_ref_class (target);
2084 ii->polymorphic = 1;
2085 }
2086
2087 /* Analyze a call statement CALL to see whether and how it utilizes formal
2088 parameters of the caller FBI->node (described by FBI->info). */
2090
2091 static void
2092 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2093 {
2094 tree target = gimple_call_fn (call);
2095
2096 if (!target
2097 || (TREE_CODE (target) != SSA_NAME
2098 && !virtual_method_call_p (target)))
2099 return;
2100
2101 struct cgraph_edge *cs = fbi->node->get_edge (call);
2102 /* If we previously turned the call into a direct call, there is
2103 no need to analyze. */
2104 if (cs && !cs->indirect_unknown_callee)
2105 return;
2106
2107 if (cs->indirect_info->polymorphic && flag_devirtualize)
2108 {
2109 tree instance;
2110 tree target = gimple_call_fn (call);
2111 ipa_polymorphic_call_context context (current_function_decl,
2112 target, call, &instance);
2113
2114 gcc_checking_assert (cs->indirect_info->otr_type
2115 == obj_type_ref_class (target));
2116 gcc_checking_assert (cs->indirect_info->otr_token
2117 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2118
2119 cs->indirect_info->vptr_changed
2120 = !context.get_dynamic_type (instance,
2121 OBJ_TYPE_REF_OBJECT (target),
2122 obj_type_ref_class (target), call);
2123 cs->indirect_info->context = context;
2124 }
2125
2126 if (TREE_CODE (target) == SSA_NAME)
2127 ipa_analyze_indirect_call_uses (fbi, call, target);
2128 else if (virtual_method_call_p (target))
2129 ipa_analyze_virtual_call_uses (fbi, call, target);
2130 }
2131
2132
2133 /* Analyze the call statement STMT with respect to formal parameters
2134 (described in FBI->info) of the caller given by FBI->node. Currently it
2135 only checks whether formal parameters are called. */
2136
2137 static void
2138 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple stmt)
2139 {
2140 if (is_gimple_call (stmt))
2141 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2142 }
2143
2144 /* Callback of walk_stmt_load_store_addr_ops used for loads, stores and
2145 address operands. If OP is a parameter declaration, mark it as used in
2146 the info structure passed in DATA. */
2147
2148 static bool
2149 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2150 {
2151 struct ipa_node_params *info = (struct ipa_node_params *) data;
2152
2153 op = get_base_address (op);
2154 if (op
2155 && TREE_CODE (op) == PARM_DECL)
2156 {
2157 int index = ipa_get_param_decl_index (info, op);
2158 gcc_assert (index >= 0);
2159 ipa_set_param_used (info, index, true);
2160 }
2161
2162 return false;
2163 }
2164
2165 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2166 the findings in various structures of the associated ipa_node_params
2167 structure, such as parameter flags, notes etc. FBI holds various data about
2168 the function being analyzed. */
2169
2170 static void
2171 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2172 {
2173 gimple_stmt_iterator gsi;
2174 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2175 {
2176 gimple stmt = gsi_stmt (gsi);
2177
2178 if (is_gimple_debug (stmt))
2179 continue;
2180
2181 ipa_analyze_stmt_uses (fbi, stmt);
2182 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2183 visit_ref_for_mod_analysis,
2184 visit_ref_for_mod_analysis,
2185 visit_ref_for_mod_analysis);
2186 }
2187 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2188 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2189 visit_ref_for_mod_analysis,
2190 visit_ref_for_mod_analysis,
2191 visit_ref_for_mod_analysis);
2192 }
2193
2194 /* Calculate controlled uses of parameters of NODE. */
2195
2196 static void
2197 ipa_analyze_controlled_uses (struct cgraph_node *node)
2198 {
2199 struct ipa_node_params *info = IPA_NODE_REF (node);
2200
2201 for (int i = 0; i < ipa_get_param_count (info); i++)
2202 {
2203 tree parm = ipa_get_param (info, i);
2204 int controlled_uses = 0;
2205
2206 /* For SSA regs see if parameter is used. For non-SSA we compute
2207 the flag during modification analysis. */
2208 if (is_gimple_reg (parm))
2209 {
2210 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2211 parm);
2212 if (ddef && !has_zero_uses (ddef))
2213 {
2214 imm_use_iterator imm_iter;
2215 use_operand_p use_p;
2216
2217 ipa_set_param_used (info, i, true);
2218 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2219 if (!is_gimple_call (USE_STMT (use_p)))
2220 {
2221 if (!is_gimple_debug (USE_STMT (use_p)))
2222 {
2223 controlled_uses = IPA_UNDESCRIBED_USE;
2224 break;
2225 }
2226 }
2227 else
2228 controlled_uses++;
2229 }
2230 else
2231 controlled_uses = 0;
2232 }
2233 else
2234 controlled_uses = IPA_UNDESCRIBED_USE;
2235 ipa_set_controlled_uses (info, i, controlled_uses);
2236 }
2237 }
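
/* A small example of the above: in

void f (void (*cb) (void)) { cb (); cb (); }

the default definition of CB has exactly two uses, both in call
statements, so its controlled-uses count is 2.  A statement such as
ptr = cb; would introduce a use outside a call and force
IPA_UNDESCRIBED_USE instead.  */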
2238
2239 /* Release the vectors held in the basic block info BI. */
2240
2241 static void
2242 free_ipa_bb_info (struct ipa_bb_info *bi)
2243 {
2244 bi->cg_edges.release ();
2245 bi->param_aa_statuses.release ();
2246 }
2247
2248 /* Dominator walker driving the analysis. */
2249
2250 class analysis_dom_walker : public dom_walker
2251 {
2252 public:
2253 analysis_dom_walker (struct ipa_func_body_info *fbi)
2254 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2255
2256 virtual void before_dom_children (basic_block);
2257
2258 private:
2259 struct ipa_func_body_info *m_fbi;
2260 };
2261
2262 void
2263 analysis_dom_walker::before_dom_children (basic_block bb)
2264 {
2265 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2266 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2267 }
2268
2269 /* Initialize the array describing properties of formal parameters
2270 of NODE, analyze their uses and compute jump functions associated
2271 with actual arguments of calls from within NODE. */
2272
2273 void
2274 ipa_analyze_node (struct cgraph_node *node)
2275 {
2276 struct ipa_func_body_info fbi;
2277 struct ipa_node_params *info;
2278
2279 ipa_check_create_node_params ();
2280 ipa_check_create_edge_args ();
2281 info = IPA_NODE_REF (node);
2282
2283 if (info->analysis_done)
2284 return;
2285 info->analysis_done = 1;
2286
2287 if (ipa_func_spec_opts_forbid_analysis_p (node))
2288 {
2289 for (int i = 0; i < ipa_get_param_count (info); i++)
2290 {
2291 ipa_set_param_used (info, i, true);
2292 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2293 }
2294 return;
2295 }
2296
2297 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2298 push_cfun (func);
2299 calculate_dominance_info (CDI_DOMINATORS);
2300 ipa_initialize_node_params (node);
2301 ipa_analyze_controlled_uses (node);
2302
2303 fbi.node = node;
2304 fbi.info = IPA_NODE_REF (node);
2305 fbi.bb_infos = vNULL;
2306 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2307 fbi.param_count = ipa_get_param_count (info);
2308 fbi.aa_walked = 0;
2309
2310 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2311 {
2312 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2313 bi->cg_edges.safe_push (cs);
2314 }
2315
2316 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2317 {
2318 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2319 bi->cg_edges.safe_push (cs);
2320 }
2321
2322 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2323
2324 int i;
2325 struct ipa_bb_info *bi;
2326 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2327 free_ipa_bb_info (bi);
2328 fbi.bb_infos.release ();
2329 free_dominance_info (CDI_DOMINATORS);
2330 pop_cfun ();
2331 }
2332
2333 /* Update the jump functions associated with call graph edge E when the call
2334 graph edge CS is being inlined, assuming that E->caller is already (possibly
2335 indirectly) inlined into CS->callee and that E has not been inlined. */
2336
2337 static void
2338 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2339 struct cgraph_edge *e)
2340 {
2341 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2342 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2343 int count = ipa_get_cs_argument_count (args);
2344 int i;
2345
2346 for (i = 0; i < count; i++)
2347 {
2348 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2349 struct ipa_polymorphic_call_context *dst_ctx
2350 = ipa_get_ith_polymorhic_call_context (args, i);
2351
2352 if (dst->type == IPA_JF_ANCESTOR)
2353 {
2354 struct ipa_jump_func *src;
2355 int dst_fid = dst->value.ancestor.formal_id;
2356 struct ipa_polymorphic_call_context *src_ctx
2357 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2358
2359 /* Variable number of arguments can cause havoc if we try to access
2360 one that does not exist in the inlined edge. So make sure we
2361 don't. */
2362 if (dst_fid >= ipa_get_cs_argument_count (top))
2363 {
2364 ipa_set_jf_unknown (dst);
2365 continue;
2366 }
2367
2368 src = ipa_get_ith_jump_func (top, dst_fid);
2369
2370 if (src_ctx && !src_ctx->useless_p ())
2371 {
2372 struct ipa_polymorphic_call_context ctx = *src_ctx;
2373
2374 /* TODO: Make type preserved safe WRT contexts. */
2375 if (!ipa_get_jf_ancestor_type_preserved (dst))
2376 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2377 ctx.offset_by (dst->value.ancestor.offset);
2378 if (!ctx.useless_p ())
2379 {
2380 if (!dst_ctx)
2381 {
2382 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2383 count);
2384 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2385 }
2386 dst_ctx->combine_with (ctx);
2387 }
2385 }
2386
2387 if (src->agg.items
2388 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2389 {
2390 struct ipa_agg_jf_item *item;
2391 int j;
2392
2393 /* Currently we do not produce clobber aggregate jump functions,
2394 replace with merging when we do. */
2395 gcc_assert (!dst->agg.items);
2396
2397 dst->agg.items = vec_safe_copy (src->agg.items);
2398 dst->agg.by_ref = src->agg.by_ref;
2399 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2400 item->offset -= dst->value.ancestor.offset;
2401 }
2402
2403 if (src->type == IPA_JF_PASS_THROUGH
2404 && src->value.pass_through.operation == NOP_EXPR)
2405 {
2406 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2407 dst->value.ancestor.agg_preserved &=
2408 src->value.pass_through.agg_preserved;
2409 }
2410 else if (src->type == IPA_JF_ANCESTOR)
2411 {
2412 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2413 dst->value.ancestor.offset += src->value.ancestor.offset;
2414 dst->value.ancestor.agg_preserved &=
2415 src->value.ancestor.agg_preserved;
2416 }
2417 else
2418 ipa_set_jf_unknown (dst);
2419 }
2420 else if (dst->type == IPA_JF_PASS_THROUGH)
2421 {
2422 struct ipa_jump_func *src;
2423 /* We must check range due to calls with variable number of arguments
2424 and we cannot combine jump functions with operations. */
2425 if (dst->value.pass_through.operation == NOP_EXPR
2426 && (dst->value.pass_through.formal_id
2427 < ipa_get_cs_argument_count (top)))
2428 {
2429 int dst_fid = dst->value.pass_through.formal_id;
2430 src = ipa_get_ith_jump_func (top, dst_fid);
2431 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2432 struct ipa_polymorphic_call_context *src_ctx
2433 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2434
2435 if (src_ctx && !src_ctx->useless_p ())
2436 {
2437 struct ipa_polymorphic_call_context ctx = *src_ctx;
2438
2439 /* TODO: Make type preserved safe WRT contexts. */
2440 if (!ipa_get_jf_pass_through_type_preserved (dst))
2441 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2442 if (!ctx.useless_p ())
2443 {
2444 if (!dst_ctx)
2445 {
2446 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2447 count);
2448 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2449 }
2450 dst_ctx->combine_with (ctx);
2451 }
2452 }
2453 switch (src->type)
2454 {
2455 case IPA_JF_UNKNOWN:
2456 ipa_set_jf_unknown (dst);
2457 break;
2458 case IPA_JF_CONST:
2459 ipa_set_jf_cst_copy (dst, src);
2460 break;
2461
2462 case IPA_JF_PASS_THROUGH:
2463 {
2464 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2465 enum tree_code operation;
2466 operation = ipa_get_jf_pass_through_operation (src);
2467
2468 if (operation == NOP_EXPR)
2469 {
2470 bool agg_p;
2471 agg_p = dst_agg_p
2472 && ipa_get_jf_pass_through_agg_preserved (src);
2473 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2474 }
2475 else
2476 {
2477 tree operand = ipa_get_jf_pass_through_operand (src);
2478 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2479 operation);
2480 }
2481 break;
2482 }
2483 case IPA_JF_ANCESTOR:
2484 {
2485 bool agg_p;
2486 agg_p = dst_agg_p
2487 && ipa_get_jf_ancestor_agg_preserved (src);
2488 ipa_set_ancestor_jf (dst,
2489 ipa_get_jf_ancestor_offset (src),
2490 ipa_get_jf_ancestor_formal_id (src),
2491 agg_p);
2492 break;
2493 }
2494 default:
2495 gcc_unreachable ();
2496 }
2497
2498 if (src->agg.items
2499 && (dst_agg_p || !src->agg.by_ref))
2500 {
2501 /* Currently we do not produce clobber aggregate jump
2502 functions, replace with merging when we do. */
2503 gcc_assert (!dst->agg.items);
2504
2505 dst->agg.by_ref = src->agg.by_ref;
2506 dst->agg.items = vec_safe_copy (src->agg.items);
2507 }
2508 }
2509 else
2510 ipa_set_jf_unknown (dst);
2511 }
2512 }
2513 }
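
/* A hypothetical example of the combination above: suppose B has been
inlined into A, and B contained a (not inlined) call C (x) where x is
B's first formal parameter, i.e. the B->C jump function is a simple
pass-through of formal 0.  If the now-inlined A->B edge passed the
constant 7 in that position, the A->C jump function becomes the
constant 7; if A->B itself passed through one of A's formals, the two
pass-throughs compose into a single pass-through of that formal.  */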
2514
2515 /* If TARGET is an addr_expr of a function declaration, make it the
2516 (speculative, if SPECULATIVE is set) destination of an indirect edge IE
2517 and return the edge. Otherwise, return NULL. */
2518
2519 struct cgraph_edge *
2520 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2521 bool speculative)
2522 {
2523 struct cgraph_node *callee;
2524 struct inline_edge_summary *es = inline_edge_summary (ie);
2525 bool unreachable = false;
2526
2527 if (TREE_CODE (target) == ADDR_EXPR)
2528 target = TREE_OPERAND (target, 0);
2529 if (TREE_CODE (target) != FUNCTION_DECL)
2530 {
2531 target = canonicalize_constructor_val (target, NULL);
2532 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2533 {
2534 /* Member pointer call that goes through a VMT lookup. */
2535 if (ie->indirect_info->member_ptr
2536 /* Or if target is not an invariant expression and we do not
2537 know whether it will evaluate to a function at runtime.
2538 This can happen when folding through &VAR, where &VAR
2539 is IP invariant, but VAR itself is not.
2540
2541 TODO: Revisit this when GCC 5 is branched. It seems that
2542 member_ptr check is not needed and that we may try to fold
2543 the expression and see if VAR is readonly. */
2544 || !is_gimple_ip_invariant (target))
2545 {
2546 if (dump_enabled_p ())
2547 {
2548 location_t loc = gimple_location_safe (ie->call_stmt);
2549 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2550 "discovered direct call non-invariant "
2551 "%s/%i\n",
2552 ie->caller->name (), ie->caller->order);
2553 }
2554 return NULL;
2555 }
2556
2557
2558 if (dump_enabled_p ())
2559 {
2560 location_t loc = gimple_location_safe (ie->call_stmt);
2561 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2562 "discovered direct call to non-function in %s/%i, "
2563 "making it __builtin_unreachable\n",
2564 ie->caller->name (), ie->caller->order);
2565 }
2566
2567 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2568 callee = cgraph_node::get_create (target);
2569 unreachable = true;
2570 }
2571 else
2572 callee = cgraph_node::get (target);
2573 }
2574 else
2575 callee = cgraph_node::get (target);
2576
2577 /* Because may-edges are not explicitly represented and the vtable may be
2578 external, we may create the first reference to the object in the unit. */
2579 if (!callee || callee->global.inlined_to)
2580 {
2581
2582 /* We had better ensure we can refer to it.
2583 In the case of static functions we are out of luck, since we have
2584 already removed the body. In the case of public functions we may or
2585 may not introduce the reference. */
2586 if (!canonicalize_constructor_val (target, NULL)
2587 || !TREE_PUBLIC (target))
2588 {
2589 if (dump_file)
2590 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2591 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2592 xstrdup_for_dump (ie->caller->name ()),
2593 ie->caller->order,
2594 xstrdup_for_dump (ie->callee->name ()),
2595 ie->callee->order);
2596 return NULL;
2597 }
2598 callee = cgraph_node::get_create (target);
2599 }
2600
2601 /* If the edge is already speculated, just check that it agrees and give up. */
2602 if (speculative && ie->speculative)
2603 {
2604 struct cgraph_edge *e2;
2605 struct ipa_ref *ref;
2606 ie->speculative_call_info (e2, ie, ref);
2607 if (e2->callee->ultimate_alias_target ()
2608 != callee->ultimate_alias_target ())
2609 {
2610 if (dump_file)
2611 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2612 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2613 xstrdup_for_dump (ie->caller->name ()),
2614 ie->caller->order,
2615 xstrdup_for_dump (callee->name ()),
2616 callee->order,
2617 xstrdup_for_dump (e2->callee->name ()),
2618 e2->callee->order);
2619 }
2620 else
2621 {
2622 if (dump_file)
2623 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2624 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2625 xstrdup_for_dump (ie->caller->name ()),
2626 ie->caller->order,
2627 xstrdup_for_dump (callee->name ()),
2628 callee->order);
2629 }
2630 return NULL;
2631 }
2632
2633 if (!dbg_cnt (devirt))
2634 return NULL;
2635
2636 ipa_check_create_node_params ();
2637
2638 /* We cannot make edges to inline clones. It is a bug if someone removed
2639 the cgraph node too early. */
2640 gcc_assert (!callee->global.inlined_to);
2641
2642 if (dump_file && !unreachable)
2643 {
2644 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2645 "(%s/%i -> %s/%i), for stmt ",
2646 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2647 speculative ? "speculative" : "known",
2648 xstrdup_for_dump (ie->caller->name ()),
2649 ie->caller->order,
2650 xstrdup_for_dump (callee->name ()),
2651 callee->order);
2652 if (ie->call_stmt)
2653 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2654 else
2655 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2656 }
2657 if (dump_enabled_p ())
2658 {
2659 location_t loc = gimple_location_safe (ie->call_stmt);
2660
2661 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2662 "converting indirect call in %s to direct call to %s\n",
2663 ie->caller->name (), callee->name ());
2664 }
2665 if (!speculative)
2666 {
2667 struct cgraph_edge *orig = ie;
2668 ie = ie->make_direct (callee);
2669 /* If we resolved speculative edge the cost is already up to date
2670 for direct call (adjusted by inline_edge_duplication_hook). */
2671 if (ie == orig)
2672 {
2673 es = inline_edge_summary (ie);
2674 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2675 - eni_size_weights.call_cost);
2676 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2677 - eni_time_weights.call_cost);
2678 }
2679 }
2680 else
2681 {
2682 if (!callee->can_be_discarded_p ())
2683 {
2684 cgraph_node *alias;
2685 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2686 if (alias)
2687 callee = alias;
2688 }
2689 /* make_speculative will update ie's cost to direct call cost. */
2690 ie = ie->make_speculative
2691 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2692 }
2693
2694 return ie;
2695 }
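
/* A minimal usage sketch (the caller and FOO_DECL are hypothetical): a
pass that has proven the callee of indirect edge IE is always FOO_DECL
could redirect the edge with

tree target = build_fold_addr_expr (foo_decl);
struct cgraph_edge *direct
= ipa_make_edge_direct_to_target (ie, target, false);

and keep the edge indirect whenever NULL is returned.  */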
2696
2697 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2698 return NULL if there is none. BY_REF specifies whether the value has to
2699 be passed by reference or by value. */
2700
2701 tree
2702 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2703 HOST_WIDE_INT offset, bool by_ref)
2704 {
2705 struct ipa_agg_jf_item *item;
2706 int i;
2707
2708 if (by_ref != agg->by_ref)
2709 return NULL;
2710
2711 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2712 if (item->offset == offset)
2713 {
2714 /* Currently we do not have clobber values, return NULL for them once
2715 we do. */
2716 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2717 return item->value;
2718 }
2719 return NULL;
2720 }
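
/* Note that the offsets stored in the items are bit offsets, so a
hypothetical lookup of a value at byte 8 of a by-value aggregate
would be

tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
8 * BITS_PER_UNIT, false);

which yields NULL if the jump function describes a by-reference
aggregate or has no item at that offset.  */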
2721
2722 /* Remove a reference to SYMBOL from the list of references of a node given by
2723 reference description RDESC. Return true if the reference has been
2724 successfully found and removed. */
2725
2726 static bool
2727 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2728 {
2729 struct ipa_ref *to_del;
2730 struct cgraph_edge *origin;
2731
2732 origin = rdesc->cs;
2733 if (!origin)
2734 return false;
2735 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2736 origin->lto_stmt_uid);
2737 if (!to_del)
2738 return false;
2739
2740 to_del->remove_reference ();
2741 if (dump_file)
2742 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2743 xstrdup_for_dump (origin->caller->name ()),
2744 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2745 return true;
2746 }
2747
2748 /* If JFUNC has a reference description with refcount different from
2749 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2750 NULL. JFUNC must be a constant jump function. */
2751
2752 static struct ipa_cst_ref_desc *
2753 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2754 {
2755 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2756 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2757 return rdesc;
2758 else
2759 return NULL;
2760 }
2761
2762 /* If the value of constant jump function JFUNC is an address of a function
2763 declaration, return the associated call graph node. Otherwise return
2764 NULL. */
2765
2766 static cgraph_node *
2767 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2768 {
2769 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2770 tree cst = ipa_get_jf_constant (jfunc);
2771 if (TREE_CODE (cst) != ADDR_EXPR
2772 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2773 return NULL;
2774
2775 return cgraph_node::get (TREE_OPERAND (cst, 0));
2776 }
2777
2778
2779 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2780 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2781 the edge specified in the rdesc. Return false if either the symbol or the
2782 reference could not be found, otherwise return true. */
2783
2784 static bool
2785 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2786 {
2787 struct ipa_cst_ref_desc *rdesc;
2788 if (jfunc->type == IPA_JF_CONST
2789 && (rdesc = jfunc_rdesc_usable (jfunc))
2790 && --rdesc->refcount == 0)
2791 {
2792 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2793 if (!symbol)
2794 return false;
2795
2796 return remove_described_reference (symbol, rdesc);
2797 }
2798 return true;
2799 }
2800
2801 /* Try to find a destination for indirect edge IE that corresponds to a simple
2802 call or a call of a member function pointer and where the destination is a
2803 pointer formal parameter described by jump function JFUNC. If it can be
2804 determined, return the newly direct edge, otherwise return NULL.
2805 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2806
2807 static struct cgraph_edge *
2808 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2809 struct ipa_jump_func *jfunc,
2810 struct ipa_node_params *new_root_info)
2811 {
2812 struct cgraph_edge *cs;
2813 tree target;
2814 bool agg_contents = ie->indirect_info->agg_contents;
2815
2816 if (ie->indirect_info->agg_contents)
2817 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2818 ie->indirect_info->offset,
2819 ie->indirect_info->by_ref);
2820 else
2821 target = ipa_value_from_jfunc (new_root_info, jfunc);
2822 if (!target)
2823 return NULL;
2824 cs = ipa_make_edge_direct_to_target (ie, target);
2825
2826 if (cs && !agg_contents)
2827 {
2828 bool ok;
2829 gcc_checking_assert (cs->callee
2830 && (cs != ie
2831 || jfunc->type != IPA_JF_CONST
2832 || !cgraph_node_for_jfunc (jfunc)
2833 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2834 ok = try_decrement_rdesc_refcount (jfunc);
2835 gcc_checking_assert (ok);
2836 }
2837
2838 return cs;
2839 }
2840
2841 /* Return the target to be used in cases of impossible devirtualization. IE
2842 and target (the latter can be NULL) are dumped when dumping is enabled. */
2843
2844 tree
2845 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2846 {
2847 if (dump_file)
2848 {
2849 if (target)
2850 fprintf (dump_file,
2851 "Type inconsistent devirtualization: %s/%i->%s\n",
2852 ie->caller->name (), ie->caller->order,
2853 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2854 else
2855 fprintf (dump_file,
2856 "No devirtualization target in %s/%i\n",
2857 ie->caller->name (), ie->caller->order);
2858 }
2859 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2860 cgraph_node::get_create (new_target);
2861 return new_target;
2862 }
2863
2864 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2865 call based on a formal parameter which is described by jump function JFUNC
2866 and if it can be determined, make it direct and return the direct edge.
2867 Otherwise, return NULL. CTX describes the polymorphic context that the
2868 parameter the call is based on brings along with it. */
2869
2870 static struct cgraph_edge *
2871 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2872 struct ipa_jump_func *jfunc,
2873 struct ipa_polymorphic_call_context ctx)
2874 {
2875 tree target = NULL;
2876 bool speculative = false;
2877
2878 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2879 return NULL;
2880
2881 gcc_assert (!ie->indirect_info->by_ref);
2882
2883 /* Try to do lookup via known virtual table pointer value. */
2884 if (!ie->indirect_info->vptr_changed
2885 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2886 {
2887 tree vtable;
2888 unsigned HOST_WIDE_INT offset;
2889 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2890 ie->indirect_info->offset,
2891 true);
2892 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2893 {
2894 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2895 vtable, offset);
2896 if (t)
2897 {
2898 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2899 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2900 || !possible_polymorphic_call_target_p
2901 (ie, cgraph_node::get (t)))
2902 {
2903 /* Do not speculate builtin_unreachable, it is stupid! */
2904 if (!ie->indirect_info->vptr_changed)
2905 target = ipa_impossible_devirt_target (ie, target);
2906 }
2907 else
2908 {
2909 target = t;
2910 speculative = ie->indirect_info->vptr_changed;
2911 }
2912 }
2913 }
2914 }
2915
2916 ipa_polymorphic_call_context ie_context (ie);
2917 vec <cgraph_node *>targets;
2918 bool final;
2919
2920 ctx.offset_by (ie->indirect_info->offset);
2921 if (ie->indirect_info->vptr_changed)
2922 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2923 ie->indirect_info->otr_type);
2924 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2925 targets = possible_polymorphic_call_targets
2926 (ie->indirect_info->otr_type,
2927 ie->indirect_info->otr_token,
2928 ctx, &final);
2929 if (final && targets.length () <= 1)
2930 {
2931 speculative = false;
2932 if (targets.length () == 1)
2933 target = targets[0]->decl;
2934 else
2935 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2936 }
2937 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2938 && !ie->speculative && ie->maybe_hot_p ())
2939 {
2940 cgraph_node *n;
2941 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2942 ie->indirect_info->otr_token,
2943 ie->indirect_info->context);
2944 if (n)
2945 {
2946 target = n->decl;
2947 speculative = true;
2948 }
2949 }
2950
2951 if (target)
2952 {
2953 if (!possible_polymorphic_call_target_p
2954 (ie, cgraph_node::get_create (target)))
2955 {
2956 if (speculative)
2957 return NULL;
2958 target = ipa_impossible_devirt_target (ie, target);
2959 }
2960 return ipa_make_edge_direct_to_target (ie, target, speculative);
2961 }
2962 else
2963 return NULL;
2964 }
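
/* A hedged illustration of the vtable path above: if the aggregate jump
function records that the relevant memory holds &_ZTV1A + 16 (a vtable
pointer of some class A), vtable_pointer_value_to_vtable recovers the
vtable VAR_DECL and the offset into it, and
gimple_get_virt_method_for_vtable then returns the method stored in the
slot selected by otr_token, say A::foo, which becomes the direct or
speculative target.  */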
2965
2966 /* Update the param called notes associated with NODE when CS is being inlined,
2967 assuming NODE is (potentially indirectly) inlined into CS->callee.
2968 Moreover, if the callee is discovered to be constant, create a new cgraph
2969 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2970 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2971
2972 static bool
2973 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2974 struct cgraph_node *node,
2975 vec<cgraph_edge *> *new_edges)
2976 {
2977 struct ipa_edge_args *top;
2978 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2979 struct ipa_node_params *new_root_info;
2980 bool res = false;
2981
2982 ipa_check_create_edge_args ();
2983 top = IPA_EDGE_REF (cs);
2984 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2985 ? cs->caller->global.inlined_to
2986 : cs->caller);
2987
2988 for (ie = node->indirect_calls; ie; ie = next_ie)
2989 {
2990 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2991 struct ipa_jump_func *jfunc;
2992 int param_index;
2993 cgraph_node *spec_target = NULL;
2994
2995 next_ie = ie->next_callee;
2996
2997 if (ici->param_index == -1)
2998 continue;
2999
3000 /* We must check range due to calls with variable number of arguments: */
3001 if (ici->param_index >= ipa_get_cs_argument_count (top))
3002 {
3003 ici->param_index = -1;
3004 continue;
3005 }
3006
3007 param_index = ici->param_index;
3008 jfunc = ipa_get_ith_jump_func (top, param_index);
3009
3010 if (ie->speculative)
3011 {
3012 struct cgraph_edge *de;
3013 struct ipa_ref *ref;
3014 ie->speculative_call_info (de, ie, ref);
3015 spec_target = de->callee;
3016 }
3017
3018 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3019 new_direct_edge = NULL;
3020 else if (ici->polymorphic)
3021 {
3022 ipa_polymorphic_call_context ctx;
3023 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3024 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3025 }
3026 else
3027 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3028 new_root_info);
3029 /* If speculation was removed, then we need to do nothing. */
3030 if (new_direct_edge && new_direct_edge != ie
3031 && new_direct_edge->callee == spec_target)
3032 {
3033 new_direct_edge->indirect_inlining_edge = 1;
3034 top = IPA_EDGE_REF (cs);
3035 res = true;
3036 if (!new_direct_edge->speculative)
3037 continue;
3038 }
3039 else if (new_direct_edge)
3040 {
3041 new_direct_edge->indirect_inlining_edge = 1;
3042 if (new_direct_edge->call_stmt)
3043 new_direct_edge->call_stmt_cannot_inline_p
3044 = !gimple_check_call_matching_types (
3045 new_direct_edge->call_stmt,
3046 new_direct_edge->callee->decl, false);
3047 if (new_edges)
3048 {
3049 new_edges->safe_push (new_direct_edge);
3050 res = true;
3051 }
3052 top = IPA_EDGE_REF (cs);
3053 /* If speculative edge was introduced we still need to update
3054 call info of the indirect edge. */
3055 if (!new_direct_edge->speculative)
3056 continue;
3057 }
3058 if (jfunc->type == IPA_JF_PASS_THROUGH
3059 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3060 {
3061 if (ici->agg_contents
3062 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3063 && !ici->polymorphic)
3064 ici->param_index = -1;
3065 else
3066 {
3067 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3068 if (ici->polymorphic
3069 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3070 ici->vptr_changed = true;
3071 }
3072 }
3073 else if (jfunc->type == IPA_JF_ANCESTOR)
3074 {
3075 if (ici->agg_contents
3076 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3077 && !ici->polymorphic)
3078 ici->param_index = -1;
3079 else
3080 {
3081 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3082 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3083 if (ici->polymorphic
3084 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3085 ici->vptr_changed = true;
3086 }
3087 }
3088 else
3089 /* Either we can find a destination for this edge now or never. */
3090 ici->param_index = -1;
3091 }
3092
3093 return res;
3094 }
3095
3096 /* Recursively traverse subtree of NODE (including node) made of inlined
3097 cgraph_edges when CS has been inlined and invoke
3098 update_indirect_edges_after_inlining on all nodes and
3099 update_jump_functions_after_inlining on all non-inlined edges that lead out
3100 of this subtree. Newly discovered indirect edges will be added to
3101 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3102 created. */
3103
3104 static bool
3105 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3106 struct cgraph_node *node,
3107 vec<cgraph_edge *> *new_edges)
3108 {
3109 struct cgraph_edge *e;
3110 bool res;
3111
3112 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3113
3114 for (e = node->callees; e; e = e->next_callee)
3115 if (!e->inline_failed)
3116 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3117 else
3118 update_jump_functions_after_inlining (cs, e);
3119 for (e = node->indirect_calls; e; e = e->next_callee)
3120 update_jump_functions_after_inlining (cs, e);
3121
3122 return res;
3123 }
3124
3125 /* Combine two controlled uses counts as done during inlining. */
3126
3127 static int
3128 combine_controlled_uses_counters (int c, int d)
3129 {
3130 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3131 return IPA_UNDESCRIBED_USE;
3132 else
3133 return c + d - 1;
3134 }
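
/* For instance, if the new root had C = 3 controlled uses of a
parameter and the inlined callee had D = 2 controlled uses of the value
passed through, the result is 3 + 2 - 1 = 4: the call statement that
has just been inlined away accounted for one of the C uses and is
replaced by the D uses inside the callee.  */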
3135
3136 /* Propagate the number of controlled uses from CS->callee to the new root
3137 of the tree of inlined nodes. */
3138
3139 static void
3140 propagate_controlled_uses (struct cgraph_edge *cs)
3141 {
3142 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3143 struct cgraph_node *new_root = cs->caller->global.inlined_to
3144 ? cs->caller->global.inlined_to : cs->caller;
3145 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3146 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3147 int count, i;
3148
3149 count = MIN (ipa_get_cs_argument_count (args),
3150 ipa_get_param_count (old_root_info));
3151 for (i = 0; i < count; i++)
3152 {
3153 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3154 struct ipa_cst_ref_desc *rdesc;
3155
3156 if (jf->type == IPA_JF_PASS_THROUGH)
3157 {
3158 int src_idx, c, d;
3159 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3160 c = ipa_get_controlled_uses (new_root_info, src_idx);
3161 d = ipa_get_controlled_uses (old_root_info, i);
3162
3163 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3164 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3165 c = combine_controlled_uses_counters (c, d);
3166 ipa_set_controlled_uses (new_root_info, src_idx, c);
3167 if (c == 0 && new_root_info->ipcp_orig_node)
3168 {
3169 struct cgraph_node *n;
3170 struct ipa_ref *ref;
3171 tree t = new_root_info->known_csts[src_idx];
3172
3173 if (t && TREE_CODE (t) == ADDR_EXPR
3174 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3175 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3176 && (ref = new_root->find_reference (n, NULL, 0)))
3177 {
3178 if (dump_file)
3179 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3180 "reference from %s/%i to %s/%i.\n",
3181 xstrdup_for_dump (new_root->name ()),
3182 new_root->order,
3183 xstrdup_for_dump (n->name ()), n->order);
3184 ref->remove_reference ();
3185 }
3186 }
3187 }
3188 else if (jf->type == IPA_JF_CONST
3189 && (rdesc = jfunc_rdesc_usable (jf)))
3190 {
3191 int d = ipa_get_controlled_uses (old_root_info, i);
3192 int c = rdesc->refcount;
3193 rdesc->refcount = combine_controlled_uses_counters (c, d);
3194 if (rdesc->refcount == 0)
3195 {
3196 tree cst = ipa_get_jf_constant (jf);
3197 struct cgraph_node *n;
3198 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3199 && TREE_CODE (TREE_OPERAND (cst, 0))
3200 == FUNCTION_DECL);
3201 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3202 if (n)
3203 {
3204 struct cgraph_node *clone;
3205 bool ok;
3206 ok = remove_described_reference (n, rdesc);
3207 gcc_checking_assert (ok);
3208
3209 clone = cs->caller;
3210 while (clone->global.inlined_to
3211 && clone != rdesc->cs->caller
3212 && IPA_NODE_REF (clone)->ipcp_orig_node)
3213 {
3214 struct ipa_ref *ref;
3215 ref = clone->find_reference (n, NULL, 0);
3216 if (ref)
3217 {
3218 if (dump_file)
3219 fprintf (dump_file, "ipa-prop: Removing "
3220 "cloning-created reference "
3221 "from %s/%i to %s/%i.\n",
3222 xstrdup_for_dump (clone->name ()),
3223 clone->order,
3224 xstrdup_for_dump (n->name ()),
3225 n->order);
3226 ref->remove_reference ();
3227 }
3228 clone = clone->callers->caller;
3229 }
3230 }
3231 }
3232 }
3233 }
3234
3235 for (i = ipa_get_param_count (old_root_info);
3236 i < ipa_get_cs_argument_count (args);
3237 i++)
3238 {
3239 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3240
3241 if (jf->type == IPA_JF_CONST)
3242 {
3243 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3244 if (rdesc)
3245 rdesc->refcount = IPA_UNDESCRIBED_USE;
3246 }
3247 else if (jf->type == IPA_JF_PASS_THROUGH)
3248 ipa_set_controlled_uses (new_root_info,
3249 jf->value.pass_through.formal_id,
3250 IPA_UNDESCRIBED_USE);
3251 }
3252 }
3253
3254 /* Update jump functions and call note functions on inlining the call site CS.
3255 CS is expected to lead to a node already cloned by
3256 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3257 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3258 created. */
3259
3260 bool
3261 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3262 vec<cgraph_edge *> *new_edges)
3263 {
3264 bool changed;
3265 /* Do nothing if the preparation phase has not been carried out yet
3266 (i.e. during early inlining). */
3267 if (!ipa_node_params_sum)
3268 return false;
3269 gcc_assert (ipa_edge_args_vector);
3270
3271 propagate_controlled_uses (cs);
3272 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3273
3274 return changed;
3275 }
3276
3277 /* Frees all dynamically allocated structures that the argument info points
3278 to. */
3279
3280 void
3281 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3282 {
3283 vec_free (args->jump_functions);
3284 memset (args, 0, sizeof (*args));
3285 }
3286
3287 /* Free all ipa_edge_args structures. */
3288
3289 void
3290 ipa_free_all_edge_args (void)
3291 {
3292 int i;
3293 struct ipa_edge_args *args;
3294
3295 if (!ipa_edge_args_vector)
3296 return;
3297
3298 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3299 ipa_free_edge_args_substructures (args);
3300
3301 vec_free (ipa_edge_args_vector);
3302 }
3303
3304 /* Frees all dynamically allocated structures that the param info points
3305 to. */
3306
3307 ipa_node_params::~ipa_node_params ()
3308 {
3309 descriptors.release ();
3310 free (lattices);
3311 /* Lattice values and their sources are deallocated with their allocation
3312 pool. */
3313 known_contexts.release ();
3314
3315 lattices = NULL;
3316 ipcp_orig_node = NULL;
3317 analysis_done = 0;
3318 node_enqueued = 0;
3319 do_clone_for_all_contexts = 0;
3320 is_all_contexts_clone = 0;
3321 node_dead = 0;
3322 }
3323
3324 /* Free all ipa_node_params structures. */
3325
3326 void
3327 ipa_free_all_node_params (void)
3328 {
3329 delete ipa_node_params_sum;
3330 ipa_node_params_sum = NULL;
3331 }
3332
3333 /* Grow ipcp_transformations if necessary. */
3334
3335 void
3336 ipcp_grow_transformations_if_necessary (void)
3337 {
3338 if (vec_safe_length (ipcp_transformations)
3339 <= (unsigned) symtab->cgraph_max_uid)
3340 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3341 }
3342
3343 /* Set the aggregate replacements of NODE to be AGGVALS. */
3344
3345 void
3346 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3347 struct ipa_agg_replacement_value *aggvals)
3348 {
3349 ipcp_grow_transformations_if_necessary ();
3350 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3351 }
3352
3353 /* Hook that is called by cgraph.c when an edge is removed. */
3354
3355 static void
3356 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3357 {
3358 struct ipa_edge_args *args;
3359
3360 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3361 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3362 return;
3363
3364 args = IPA_EDGE_REF (cs);
3365 if (args->jump_functions)
3366 {
3367 struct ipa_jump_func *jf;
3368 int i;
3369 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3370 {
3371 struct ipa_cst_ref_desc *rdesc;
3372 try_decrement_rdesc_refcount (jf);
3373 if (jf->type == IPA_JF_CONST
3374 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3375 && rdesc->cs == cs)
3376 rdesc->cs = NULL;
3377 }
3378 }
3379
3380 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3381 }
3382
3383 /* Hook that is called by cgraph.c when an edge is duplicated. */
3384
3385 static void
3386 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3387 void *)
3388 {
3389 struct ipa_edge_args *old_args, *new_args;
3390 unsigned int i;
3391
3392 ipa_check_create_edge_args ();
3393
3394 old_args = IPA_EDGE_REF (src);
3395 new_args = IPA_EDGE_REF (dst);
3396
3397 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3398 if (old_args->polymorphic_call_contexts)
3399 new_args->polymorphic_call_contexts
3400 = vec_safe_copy (old_args->polymorphic_call_contexts);
3401
3402 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3403 {
3404 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3405 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3406
3407 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3408
3409 if (src_jf->type == IPA_JF_CONST)
3410 {
3411 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3412
3413 if (!src_rdesc)
3414 dst_jf->value.constant.rdesc = NULL;
3415 else if (src->caller == dst->caller)
3416 {
3417 struct ipa_ref *ref;
3418 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3419 gcc_checking_assert (n);
3420 ref = src->caller->find_reference (n, src->call_stmt,
3421 src->lto_stmt_uid);
3422 gcc_checking_assert (ref);
3423 dst->caller->clone_reference (ref, ref->stmt);
3424
3425 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3426 dst_rdesc->cs = dst;
3427 dst_rdesc->refcount = src_rdesc->refcount;
3428 dst_rdesc->next_duplicate = NULL;
3429 dst_jf->value.constant.rdesc = dst_rdesc;
3430 }
3431 else if (src_rdesc->cs == src)
3432 {
3433 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3434 dst_rdesc->cs = dst;
3435 dst_rdesc->refcount = src_rdesc->refcount;
3436 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3437 src_rdesc->next_duplicate = dst_rdesc;
3438 dst_jf->value.constant.rdesc = dst_rdesc;
3439 }
3440 else
3441 {
3442 struct ipa_cst_ref_desc *dst_rdesc;
3443 /* This can happen during inlining, when a JFUNC can refer to a
3444 reference taken in a function up in the tree of inline clones.
3445 We need to find the duplicate that refers to our tree of
3446 inline clones. */
3447
3448 gcc_assert (dst->caller->global.inlined_to);
3449 for (dst_rdesc = src_rdesc->next_duplicate;
3450 dst_rdesc;
3451 dst_rdesc = dst_rdesc->next_duplicate)
3452 {
3453 struct cgraph_node *top;
3454 top = dst_rdesc->cs->caller->global.inlined_to
3455 ? dst_rdesc->cs->caller->global.inlined_to
3456 : dst_rdesc->cs->caller;
3457 if (dst->caller->global.inlined_to == top)
3458 break;
3459 }
3460 gcc_assert (dst_rdesc);
3461 dst_jf->value.constant.rdesc = dst_rdesc;
3462 }
3463 }
3464 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3465 && src->caller == dst->caller)
3466 {
3467 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3468 ? dst->caller->global.inlined_to : dst->caller;
3469 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3470 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3471
3472 int c = ipa_get_controlled_uses (root_info, idx);
3473 if (c != IPA_UNDESCRIBED_USE)
3474 {
3475 c++;
3476 ipa_set_controlled_uses (root_info, idx, c);
3477 }
3478 }
3479 }
3480 }
3481
3482 /* Analyze a function newly added to the callgraph. */
3483
3484 static void
3485 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3486 {
3487 if (node->has_gimple_body_p ())
3488 ipa_analyze_node (node);
3489 }
3490
3491 /* Hook that is called by summary when a node is duplicated. */
3492
3493 void
3494 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3495 ipa_node_params *old_info,
3496 ipa_node_params *new_info)
3497 {
3498 ipa_agg_replacement_value *old_av, *new_av;
3499
3500 new_info->descriptors = old_info->descriptors.copy ();
3501 new_info->lattices = NULL;
3502 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3503
3504 new_info->analysis_done = old_info->analysis_done;
3505 new_info->node_enqueued = old_info->node_enqueued;
3506
3507 old_av = ipa_get_agg_replacements_for_node (src);
3508 if (old_av)
3509 {
3510 new_av = NULL;
3511 while (old_av)
3512 {
3513 struct ipa_agg_replacement_value *v;
3514
3515 v = ggc_alloc<ipa_agg_replacement_value> ();
3516 memcpy (v, old_av, sizeof (*v));
3517 v->next = new_av;
3518 new_av = v;
3519 old_av = old_av->next;
3520 }
3521 ipa_set_node_agg_value_chain (dst, new_av);
3522 }
3523
3524 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3525
3526 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3527 {
3528 ipcp_grow_transformations_if_necessary ();
3529 src_trans = ipcp_get_transformation_summary (src);
3530 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3531 vec<ipa_alignment, va_gc> *&dst_alignments
3532 = ipcp_get_transformation_summary (dst)->alignments;
3533 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3534 for (unsigned i = 0; i < src_alignments->length (); ++i)
3535 dst_alignments->quick_push ((*src_alignments)[i]);
3536 }
3537 }
3538
3539 /* Register our cgraph hooks if they are not already there. */
3540
3541 void
3542 ipa_register_cgraph_hooks (void)
3543 {
3544 ipa_check_create_node_params ();
3545
3546 if (!edge_removal_hook_holder)
3547 edge_removal_hook_holder =
3548 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3549 if (!edge_duplication_hook_holder)
3550 edge_duplication_hook_holder =
3551 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3552 function_insertion_hook_holder =
3553 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3554 }
3555
3556 /* Unregister our cgraph hooks. */
3557
3558 static void
3559 ipa_unregister_cgraph_hooks (void)
3560 {
3561 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3562 edge_removal_hook_holder = NULL;
3563 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3564 edge_duplication_hook_holder = NULL;
3565 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3566 function_insertion_hook_holder = NULL;
3567 }
3568
3569 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3570 longer needed after ipa-cp. */
3571
3572 void
3573 ipa_free_all_structures_after_ipa_cp (void)
3574 {
3575 if (!optimize && !in_lto_p)
3576 {
3577 ipa_free_all_edge_args ();
3578 ipa_free_all_node_params ();
3579 ipcp_sources_pool.release ();
3580 ipcp_cst_values_pool.release ();
3581 ipcp_poly_ctx_values_pool.release ();
3582 ipcp_agg_lattice_pool.release ();
3583 ipa_unregister_cgraph_hooks ();
3584 ipa_refdesc_pool.release ();
3585 }
3586 }
3587
3588 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3589 longer needed after indirect inlining. */
3590
3591 void
3592 ipa_free_all_structures_after_iinln (void)
3593 {
3594 ipa_free_all_edge_args ();
3595 ipa_free_all_node_params ();
3596 ipa_unregister_cgraph_hooks ();
3597 ipcp_sources_pool.release ();
3598 ipcp_cst_values_pool.release ();
3599 ipcp_poly_ctx_values_pool.release ();
3600 ipcp_agg_lattice_pool.release ();
3601 ipa_refdesc_pool.release ();
3602 }
3603
3604 /* Print the parameter descriptors of function NODE to F. */
3606
3607 void
3608 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3609 {
3610 int i, count;
3611 struct ipa_node_params *info;
3612
3613 if (!node->definition)
3614 return;
3615 info = IPA_NODE_REF (node);
3616 fprintf (f, " function %s/%i parameter descriptors:\n",
3617 node->name (), node->order);
3618 count = ipa_get_param_count (info);
3619 for (i = 0; i < count; i++)
3620 {
3621 int c;
3622
3623 fprintf (f, " ");
3624 ipa_dump_param (f, info, i);
3625 if (ipa_is_param_used (info, i))
3626 fprintf (f, " used");
3627 c = ipa_get_controlled_uses (info, i);
3628 if (c == IPA_UNDESCRIBED_USE)
3629 fprintf (f, " undescribed_use");
3630 else
3631 fprintf (f, " controlled_uses=%i", c);
3632 fprintf (f, "\n");
3633 }
3634 }
3635
3636 /* Print ipa_tree_map data structures of all functions in the
3637 callgraph to F. */
3638
3639 void
3640 ipa_print_all_params (FILE * f)
3641 {
3642 struct cgraph_node *node;
3643
3644 fprintf (f, "\nFunction parameters:\n");
3645 FOR_EACH_FUNCTION (node)
3646 ipa_print_node_params (f, node);
3647 }
3648
/* Return a heap-allocated vector containing the formal parameters of
   FNDECL.  */
3650
3651 vec<tree>
3652 ipa_get_vector_of_formal_parms (tree fndecl)
3653 {
3654 vec<tree> args;
3655 int count;
3656 tree parm;
3657
3658 gcc_assert (!flag_wpa);
3659 count = count_formal_params (fndecl);
3660 args.create (count);
3661 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3662 args.quick_push (parm);
3663
3664 return args;
3665 }
3666
/* Return a heap-allocated vector containing the types of the formal
   parameters of function type FNTYPE.  */
3669
3670 vec<tree>
3671 ipa_get_vector_of_formal_parm_types (tree fntype)
3672 {
3673 vec<tree> types;
3674 int count = 0;
3675 tree t;
3676
3677 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3678 count++;
3679
3680 types.create (count);
3681 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3682 types.quick_push (TREE_VALUE (t));
3683
3684 return types;
3685 }
3686
/* Modify the function declaration FNDECL and its type according to the plan
   in ADJUSTMENTS.  Also set the base field of each individual adjustment
   structure to the actual parameter being modified, as determined by its
   base_index field.  */
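/* An illustrative example (the plan below is hypothetical, not taken from a
   real dump): for int foo (int a, int b) and an adjustment vector
   { {op=IPA_PARM_OP_COPY, base_index=0}, {op=IPA_PARM_OP_REMOVE,
   base_index=1} } the declaration is rebuilt as int foo (int a).  Entries
   that are neither copies nor removals synthesize a fresh PARM_DECL, named
   with the "SYNTH" prefix unless adj->arg_prefix says otherwise.  */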
3691
3692 void
3693 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3694 {
3695 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3696 tree orig_type = TREE_TYPE (fndecl);
3697 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3698
  /* The following test is an ugly hack; some functions simply don't have any
     arguments in their type.  This is probably a bug but well...  */
3701 bool care_for_types = (old_arg_types != NULL_TREE);
3702 bool last_parm_void;
3703 vec<tree> otypes;
3704 if (care_for_types)
3705 {
3706 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3707 == void_type_node);
3708 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3709 if (last_parm_void)
3710 gcc_assert (oparms.length () + 1 == otypes.length ());
3711 else
3712 gcc_assert (oparms.length () == otypes.length ());
3713 }
3714 else
3715 {
3716 last_parm_void = false;
3717 otypes.create (0);
3718 }
3719
3720 int len = adjustments.length ();
3721 tree *link = &DECL_ARGUMENTS (fndecl);
3722 tree new_arg_types = NULL;
3723 for (int i = 0; i < len; i++)
3724 {
3725 struct ipa_parm_adjustment *adj;
3726 gcc_assert (link);
3727
3728 adj = &adjustments[i];
3729 tree parm;
3730 if (adj->op == IPA_PARM_OP_NEW)
3731 parm = NULL;
3732 else
3733 parm = oparms[adj->base_index];
3734 adj->base = parm;
3735
3736 if (adj->op == IPA_PARM_OP_COPY)
3737 {
3738 if (care_for_types)
3739 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3740 new_arg_types);
3741 *link = parm;
3742 link = &DECL_CHAIN (parm);
3743 }
3744 else if (adj->op != IPA_PARM_OP_REMOVE)
3745 {
3746 tree new_parm;
3747 tree ptype;
3748
3749 if (adj->by_ref)
3750 ptype = build_pointer_type (adj->type);
3751 else
3752 {
3753 ptype = adj->type;
3754 if (is_gimple_reg_type (ptype))
3755 {
3756 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3757 if (TYPE_ALIGN (ptype) < malign)
3758 ptype = build_aligned_type (ptype, malign);
3759 }
3760 }
3761
3762 if (care_for_types)
3763 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3764
3765 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3766 ptype);
3767 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3768 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3769 DECL_ARTIFICIAL (new_parm) = 1;
3770 DECL_ARG_TYPE (new_parm) = ptype;
3771 DECL_CONTEXT (new_parm) = fndecl;
3772 TREE_USED (new_parm) = 1;
3773 DECL_IGNORED_P (new_parm) = 1;
3774 layout_decl (new_parm, 0);
3775
3776 if (adj->op == IPA_PARM_OP_NEW)
3777 adj->base = NULL;
3778 else
3779 adj->base = parm;
3780 adj->new_decl = new_parm;
3781
3782 *link = new_parm;
3783 link = &DECL_CHAIN (new_parm);
3784 }
3785 }
3786
3787 *link = NULL_TREE;
3788
3789 tree new_reversed = NULL;
3790 if (care_for_types)
3791 {
3792 new_reversed = nreverse (new_arg_types);
3793 if (last_parm_void)
3794 {
3795 if (new_reversed)
3796 TREE_CHAIN (new_arg_types) = void_list_node;
3797 else
3798 new_reversed = void_list_node;
3799 }
3800 }
3801
  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).  The exception is METHOD_TYPEs,
     which must have a THIS argument; when we are asked to remove it, we
     need to build a new FUNCTION_TYPE instead.  */
3807 tree new_type = NULL;
3808 if (TREE_CODE (orig_type) != METHOD_TYPE
3809 || (adjustments[0].op == IPA_PARM_OP_COPY
3810 && adjustments[0].base_index == 0))
3811 {
3812 new_type = build_distinct_type_copy (orig_type);
3813 TYPE_ARG_TYPES (new_type) = new_reversed;
3814 }
3815 else
3816 {
3817 new_type
3818 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3819 new_reversed));
3820 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3821 DECL_VINDEX (fndecl) = NULL_TREE;
3822 }
3823
  /* When the signature changes, we need to clear the builtin info.  */
3825 if (DECL_BUILT_IN (fndecl))
3826 {
3827 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3828 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3829 }
3830
3831 TREE_TYPE (fndecl) = new_type;
3832 DECL_VIRTUAL_P (fndecl) = 0;
3833 DECL_LANG_SPECIFIC (fndecl) = NULL;
3834 otypes.release ();
3835 oparms.release ();
3836 }
3837
/* Modify the actual arguments of the function call statement STMT as
   indicated in ADJUSTMENTS.  If this is a directly recursive call, CS must
   be NULL.  Otherwise it must contain the corresponding call graph edge.  */
3841
3842 void
3843 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3844 ipa_parm_adjustment_vec adjustments)
3845 {
3846 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3847 vec<tree> vargs;
3848 vec<tree, va_gc> **debug_args = NULL;
3849 gcall *new_stmt;
3850 gimple_stmt_iterator gsi, prev_gsi;
3851 tree callee_decl;
3852 int i, len;
3853
3854 len = adjustments.length ();
3855 vargs.create (len);
3856 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3857 current_node->remove_stmt_references (stmt);
3858
3859 gsi = gsi_for_stmt (stmt);
3860 prev_gsi = gsi;
3861 gsi_prev (&prev_gsi);
3862 for (i = 0; i < len; i++)
3863 {
3864 struct ipa_parm_adjustment *adj;
3865
3866 adj = &adjustments[i];
3867
3868 if (adj->op == IPA_PARM_OP_COPY)
3869 {
3870 tree arg = gimple_call_arg (stmt, adj->base_index);
3871
3872 vargs.quick_push (arg);
3873 }
3874 else if (adj->op != IPA_PARM_OP_REMOVE)
3875 {
3876 tree expr, base, off;
3877 location_t loc;
3878 unsigned int deref_align = 0;
3879 bool deref_base = false;
3880
          /* When we create a new parameter out of the value of the old one,
             we can do the following kinds of transformations:

             - A scalar passed by reference is converted to a scalar passed
               by value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar.)

             - A part of an aggregate is passed instead of the whole
               aggregate.  The part can be passed either by value or by
               reference; this is determined by the value of adj->by_ref.
               Moreover, the code below handles both situations when the
               original aggregate is passed by value (its type is not a
               pointer) and when it is passed by reference (it is a pointer
               to an aggregate).

             When the new argument is passed by reference (adj->by_ref is
             true), it must be a part of an aggregate and therefore we form
             it by simply taking the address of a reference inside the
             original aggregate.  */
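          /* A hypothetical illustration: if the old call was foo (&a) and
             the plan passes the scalar a.fld at byte offset 8 by value,
             BASE becomes &a, OFF becomes 8, and the argument pushed below
             is the MEM_REF loading a.fld; had adj->by_ref been set, we
             would push the address &a + 8 instead.  */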
3899
3900 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3901 base = gimple_call_arg (stmt, adj->base_index);
3902 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3903 : EXPR_LOCATION (base);
3904
3905 if (TREE_CODE (base) != ADDR_EXPR
3906 && POINTER_TYPE_P (TREE_TYPE (base)))
3907 off = build_int_cst (adj->alias_ptr_type,
3908 adj->offset / BITS_PER_UNIT);
3909 else
3910 {
3911 HOST_WIDE_INT base_offset;
3912 tree prev_base;
3913 bool addrof;
3914
3915 if (TREE_CODE (base) == ADDR_EXPR)
3916 {
3917 base = TREE_OPERAND (base, 0);
3918 addrof = true;
3919 }
3920 else
3921 addrof = false;
3922 prev_base = base;
3923 base = get_addr_base_and_unit_offset (base, &base_offset);
3924 /* Aggregate arguments can have non-invariant addresses. */
3925 if (!base)
3926 {
3927 base = build_fold_addr_expr (prev_base);
3928 off = build_int_cst (adj->alias_ptr_type,
3929 adj->offset / BITS_PER_UNIT);
3930 }
3931 else if (TREE_CODE (base) == MEM_REF)
3932 {
3933 if (!addrof)
3934 {
3935 deref_base = true;
3936 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3937 }
3938 off = build_int_cst (adj->alias_ptr_type,
3939 base_offset
3940 + adj->offset / BITS_PER_UNIT);
3941 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3942 off);
3943 base = TREE_OPERAND (base, 0);
3944 }
3945 else
3946 {
3947 off = build_int_cst (adj->alias_ptr_type,
3948 base_offset
3949 + adj->offset / BITS_PER_UNIT);
3950 base = build_fold_addr_expr (base);
3951 }
3952 }
3953
3954 if (!adj->by_ref)
3955 {
3956 tree type = adj->type;
3957 unsigned int align;
3958 unsigned HOST_WIDE_INT misalign;
3959
3960 if (deref_base)
3961 {
3962 align = deref_align;
3963 misalign = 0;
3964 }
3965 else
3966 {
3967 get_pointer_alignment_1 (base, &align, &misalign);
3968 if (TYPE_ALIGN (type) > align)
3969 align = TYPE_ALIGN (type);
3970 }
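              /* Account for the constant offset OFF and, if the access is
                 then misaligned, weaken the alignment to the largest power
                 of two that still divides the misalignment.  */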
3971 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3972 * BITS_PER_UNIT);
3973 misalign = misalign & (align - 1);
3974 if (misalign != 0)
3975 align = (misalign & -misalign);
3976 if (align < TYPE_ALIGN (type))
3977 type = build_aligned_type (type, align);
3978 base = force_gimple_operand_gsi (&gsi, base,
3979 true, NULL, true, GSI_SAME_STMT);
3980 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
              /* If EXPR is not a valid gimple call argument, emit
                 a load into a temporary.  */
3983 if (is_gimple_reg_type (TREE_TYPE (expr)))
3984 {
3985 gimple tem = gimple_build_assign (NULL_TREE, expr);
3986 if (gimple_in_ssa_p (cfun))
3987 {
3988 gimple_set_vuse (tem, gimple_vuse (stmt));
3989 expr = make_ssa_name (TREE_TYPE (expr), tem);
3990 }
3991 else
3992 expr = create_tmp_reg (TREE_TYPE (expr));
3993 gimple_assign_set_lhs (tem, expr);
3994 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
3995 }
3996 }
3997 else
3998 {
3999 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4000 expr = build_fold_addr_expr (expr);
4001 expr = force_gimple_operand_gsi (&gsi, expr,
4002 true, NULL, true, GSI_SAME_STMT);
4003 }
4004 vargs.quick_push (expr);
4005 }
4006 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4007 {
4008 unsigned int ix;
4009 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4010 gimple def_temp;
4011
4012 arg = gimple_call_arg (stmt, adj->base_index);
4013 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4014 {
4015 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4016 continue;
4017 arg = fold_convert_loc (gimple_location (stmt),
4018 TREE_TYPE (origin), arg);
4019 }
4020 if (debug_args == NULL)
4021 debug_args = decl_debug_args_insert (callee_decl);
4022 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4023 if (ddecl == origin)
4024 {
4025 ddecl = (**debug_args)[ix + 1];
4026 break;
4027 }
4028 if (ddecl == NULL)
4029 {
4030 ddecl = make_node (DEBUG_EXPR_DECL);
4031 DECL_ARTIFICIAL (ddecl) = 1;
4032 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4033 DECL_MODE (ddecl) = DECL_MODE (origin);
4034
4035 vec_safe_push (*debug_args, origin);
4036 vec_safe_push (*debug_args, ddecl);
4037 }
4038 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4039 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4040 }
4041 }
4042
4043 if (dump_file && (dump_flags & TDF_DETAILS))
4044 {
4045 fprintf (dump_file, "replacing stmt:");
4046 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4047 }
4048
4049 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4050 vargs.release ();
4051 if (gimple_call_lhs (stmt))
4052 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4053
4054 gimple_set_block (new_stmt, gimple_block (stmt));
4055 if (gimple_has_location (stmt))
4056 gimple_set_location (new_stmt, gimple_location (stmt));
4057 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4058 gimple_call_copy_flags (new_stmt, stmt);
4059 if (gimple_in_ssa_p (cfun))
4060 {
4061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4062 if (gimple_vdef (stmt))
4063 {
4064 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4065 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4066 }
4067 }
4068
4069 if (dump_file && (dump_flags & TDF_DETAILS))
4070 {
4071 fprintf (dump_file, "with stmt:");
4072 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4073 fprintf (dump_file, "\n");
4074 }
4075 gsi_replace (&gsi, new_stmt, true);
4076 if (cs)
4077 cs->set_call_stmt (new_stmt);
4078 do
4079 {
4080 current_node->record_stmt_references (gsi_stmt (gsi));
4081 gsi_prev (&gsi);
4082 }
4083 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4084 }
4085
/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and the new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */
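/* For example (an illustrative sketch; the name ISRA.3 is hypothetical): if
   the candidate adjustment maps the accessed piece of a parameter to a new
   scalar parameter ISRA.3, a use of that piece is rewritten to ISRA.3, or
   is wrapped in a VIEW_CONVERT_EXPR when CONVERT is set and the types do
   not usefully convert; with cand->by_ref set, *ISRA.3 is used instead.  */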
4092
4093 bool
4094 ipa_modify_expr (tree *expr, bool convert,
4095 ipa_parm_adjustment_vec adjustments)
4096 {
4097 struct ipa_parm_adjustment *cand
4098 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4099 if (!cand)
4100 return false;
4101
4102 tree src;
4103 if (cand->by_ref)
4104 src = build_simple_mem_ref (cand->new_decl);
4105 else
4106 src = cand->new_decl;
4107
4108 if (dump_file && (dump_flags & TDF_DETAILS))
4109 {
4110 fprintf (dump_file, "About to replace expr ");
4111 print_generic_expr (dump_file, *expr, 0);
4112 fprintf (dump_file, " with ");
4113 print_generic_expr (dump_file, src, 0);
4114 fprintf (dump_file, "\n");
4115 }
4116
4117 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4118 {
4119 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4120 *expr = vce;
4121 }
4122 else
4123 *expr = src;
4124 return true;
4125 }
4126
/* If T is an SSA_NAME, return NULL if it is not a default def, or its base
   variable if it is.  If IGNORE_DEFAULT_DEF is true, the base variable is
   returned regardless of whether it is a default def.  Return T if it is
   not an SSA_NAME.  */
4131
4132 static tree
4133 get_ssa_base_param (tree t, bool ignore_default_def)
4134 {
4135 if (TREE_CODE (t) == SSA_NAME)
4136 {
4137 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4138 return SSA_NAME_VAR (t);
4139 else
4140 return NULL_TREE;
4141 }
4142 return t;
4143 }
4144
/* Given an expression, return the adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry is
   found, return NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs even when they are not a
   default def; otherwise bail out on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the expression
   provided is a component reference.  ADJUSTMENTS is the adjustments
   vector.  */
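/* E.g., for the hypothetical expression REALPART_EXPR <c> the code below
   first descends to the operand c and sets *CONVERT, then computes the base
   and bit offset of the reference and searches ADJUSTMENTS for an entry
   matching both.  */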
4155
4156 ipa_parm_adjustment *
4157 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4158 ipa_parm_adjustment_vec adjustments,
4159 bool ignore_default_def)
4160 {
4161 if (TREE_CODE (**expr) == BIT_FIELD_REF
4162 || TREE_CODE (**expr) == IMAGPART_EXPR
4163 || TREE_CODE (**expr) == REALPART_EXPR)
4164 {
4165 *expr = &TREE_OPERAND (**expr, 0);
4166 if (convert)
4167 *convert = true;
4168 }
4169
4170 HOST_WIDE_INT offset, size, max_size;
4171 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4172 if (!base || size == -1 || max_size == -1)
4173 return NULL;
4174
4175 if (TREE_CODE (base) == MEM_REF)
4176 {
4177 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4178 base = TREE_OPERAND (base, 0);
4179 }
4180
4181 base = get_ssa_base_param (base, ignore_default_def);
4182 if (!base || TREE_CODE (base) != PARM_DECL)
4183 return NULL;
4184
4185 struct ipa_parm_adjustment *cand = NULL;
4186 unsigned int len = adjustments.length ();
4187 for (unsigned i = 0; i < len; i++)
4188 {
4189 struct ipa_parm_adjustment *adj = &adjustments[i];
4190
4191 if (adj->base == base
4192 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4193 {
4194 cand = adj;
4195 break;
4196 }
4197 }
4198
4199 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4200 return NULL;
4201 return cand;
4202 }
4203
4204 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4205
4206 static bool
4207 index_in_adjustments_multiple_times_p (int base_index,
4208 ipa_parm_adjustment_vec adjustments)
4209 {
4210 int i, len = adjustments.length ();
4211 bool one = false;
4212
4213 for (i = 0; i < len; i++)
4214 {
4215 struct ipa_parm_adjustment *adj;
4216 adj = &adjustments[i];
4217
4218 if (adj->base_index == base_index)
4219 {
4220 if (one)
4221 return true;
4222 else
4223 one = true;
4224 }
4225 }
4226 return false;
4227 }
4228
4229
4230 /* Return adjustments that should have the same effect on function parameters
4231 and call arguments as if they were first changed according to adjustments in
4232 INNER and then by adjustments in OUTER. */
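/* A worked (hypothetical) example: let the original parameters be (a, b).
   If INNER is { COPY a, REMOVE b } and OUTER, which operates on the
   resulting single-parameter function, is { COPY 0 }, the combination is
   { COPY a, REMOVE b }: indices in OUTER are mapped through the surviving
   entries of INNER, and the removals from INNER are appended at the end.  */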
4233
4234 ipa_parm_adjustment_vec
4235 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4236 ipa_parm_adjustment_vec outer)
4237 {
4238 int i, outlen = outer.length ();
4239 int inlen = inner.length ();
4240 int removals = 0;
4241 ipa_parm_adjustment_vec adjustments, tmp;
4242
4243 tmp.create (inlen);
4244 for (i = 0; i < inlen; i++)
4245 {
4246 struct ipa_parm_adjustment *n;
4247 n = &inner[i];
4248
4249 if (n->op == IPA_PARM_OP_REMOVE)
4250 removals++;
4251 else
4252 {
          /* FIXME: Handling of new arguments is not implemented yet.  */
4254 gcc_assert (n->op != IPA_PARM_OP_NEW);
4255 tmp.quick_push (*n);
4256 }
4257 }
4258
4259 adjustments.create (outlen + removals);
4260 for (i = 0; i < outlen; i++)
4261 {
4262 struct ipa_parm_adjustment r;
4263 struct ipa_parm_adjustment *out = &outer[i];
4264 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4265
4266 memset (&r, 0, sizeof (r));
4267 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4268 if (out->op == IPA_PARM_OP_REMOVE)
4269 {
4270 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4271 {
4272 r.op = IPA_PARM_OP_REMOVE;
4273 adjustments.quick_push (r);
4274 }
4275 continue;
4276 }
4277 else
4278 {
          /* FIXME: Handling of new arguments is not implemented yet.  */
4280 gcc_assert (out->op != IPA_PARM_OP_NEW);
4281 }
4282
4283 r.base_index = in->base_index;
4284 r.type = out->type;
4285
4286 /* FIXME: Create nonlocal value too. */
4287
4288 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4289 r.op = IPA_PARM_OP_COPY;
4290 else if (in->op == IPA_PARM_OP_COPY)
4291 r.offset = out->offset;
4292 else if (out->op == IPA_PARM_OP_COPY)
4293 r.offset = in->offset;
4294 else
4295 r.offset = in->offset + out->offset;
4296 adjustments.quick_push (r);
4297 }
4298
4299 for (i = 0; i < inlen; i++)
4300 {
4301 struct ipa_parm_adjustment *n = &inner[i];
4302
4303 if (n->op == IPA_PARM_OP_REMOVE)
4304 adjustments.quick_push (*n);
4305 }
4306
4307 tmp.release ();
4308 return adjustments;
4309 }
4310
/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
   way, assuming they are meant to be applied to FNDECL.  */
4313
4314 void
4315 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4316 tree fndecl)
4317 {
4318 int i, len = adjustments.length ();
4319 bool first = true;
4320 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4321
4322 fprintf (file, "IPA param adjustments: ");
4323 for (i = 0; i < len; i++)
4324 {
4325 struct ipa_parm_adjustment *adj;
4326 adj = &adjustments[i];
4327
4328 if (!first)
4329 fprintf (file, " ");
4330 else
4331 first = false;
4332
4333 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4334 print_generic_expr (file, parms[adj->base_index], 0);
4335 if (adj->base)
4336 {
4337 fprintf (file, ", base: ");
4338 print_generic_expr (file, adj->base, 0);
4339 }
4340 if (adj->new_decl)
4341 {
4342 fprintf (file, ", new_decl: ");
4343 print_generic_expr (file, adj->new_decl, 0);
4344 }
4345 if (adj->new_ssa_base)
4346 {
4347 fprintf (file, ", new_ssa_base: ");
4348 print_generic_expr (file, adj->new_ssa_base, 0);
4349 }
4350
4351 if (adj->op == IPA_PARM_OP_COPY)
4352 fprintf (file, ", copy_param");
4353 else if (adj->op == IPA_PARM_OP_REMOVE)
4354 fprintf (file, ", remove_param");
4355 else
4356 fprintf (file, ", offset %li", (long) adj->offset);
4357 if (adj->by_ref)
4358 fprintf (file, ", by_ref");
4359 print_node_brief (file, ", type: ", adj->type, 0);
4360 fprintf (file, "\n");
4361 }
4362 parms.release ();
4363 }
4364
/* Dump the linked list of aggregate replacement values AV to F.  */
4366
4367 void
4368 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4369 {
4370 bool comma = false;
4371 fprintf (f, " Aggregate replacements:");
4372 for (; av; av = av->next)
4373 {
4374 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4375 av->index, av->offset);
4376 print_generic_expr (f, av->value, 0);
4377 comma = true;
4378 }
4379 fprintf (f, "\n");
4380 }
4381
4382 /* Stream out jump function JUMP_FUNC to OB. */
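/* Roughly, the layout emitted below is: the jump function type as a uhwi,
   a type-specific payload (for instance, for IPA_JF_PASS_THROUGH the
   operation, the formal_id and, in the NOP_EXPR case, a bit-packed
   agg_preserved flag), the number of aggregate items followed by the items
   themselves, and finally the bit-packed alignment information.
   ipa_read_jump_function must read the fields back in exactly this
   order.  */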
4383
4384 static void
4385 ipa_write_jump_function (struct output_block *ob,
4386 struct ipa_jump_func *jump_func)
4387 {
4388 struct ipa_agg_jf_item *item;
4389 struct bitpack_d bp;
4390 int i, count;
4391
4392 streamer_write_uhwi (ob, jump_func->type);
4393 switch (jump_func->type)
4394 {
4395 case IPA_JF_UNKNOWN:
4396 break;
4397 case IPA_JF_CONST:
4398 gcc_assert (
4399 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4400 stream_write_tree (ob, jump_func->value.constant.value, true);
4401 break;
4402 case IPA_JF_PASS_THROUGH:
4403 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4404 if (jump_func->value.pass_through.operation == NOP_EXPR)
4405 {
4406 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4407 bp = bitpack_create (ob->main_stream);
4408 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4409 streamer_write_bitpack (&bp);
4410 }
4411 else
4412 {
4413 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4414 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4415 }
4416 break;
4417 case IPA_JF_ANCESTOR:
4418 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4419 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4420 bp = bitpack_create (ob->main_stream);
4421 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4422 streamer_write_bitpack (&bp);
4423 break;
4424 }
4425
4426 count = vec_safe_length (jump_func->agg.items);
4427 streamer_write_uhwi (ob, count);
4428 if (count)
4429 {
4430 bp = bitpack_create (ob->main_stream);
4431 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4432 streamer_write_bitpack (&bp);
4433 }
4434
4435 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4436 {
4437 streamer_write_uhwi (ob, item->offset);
4438 stream_write_tree (ob, item->value, true);
4439 }
4440
4441 bp = bitpack_create (ob->main_stream);
4442 bp_pack_value (&bp, jump_func->alignment.known, 1);
4443 streamer_write_bitpack (&bp);
4444 if (jump_func->alignment.known)
4445 {
4446 streamer_write_uhwi (ob, jump_func->alignment.align);
4447 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4448 }
4449 }
4450
4451 /* Read in jump function JUMP_FUNC from IB. */
4452
4453 static void
4454 ipa_read_jump_function (struct lto_input_block *ib,
4455 struct ipa_jump_func *jump_func,
4456 struct cgraph_edge *cs,
4457 struct data_in *data_in)
4458 {
4459 enum jump_func_type jftype;
4460 enum tree_code operation;
4461 int i, count;
4462
4463 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4464 switch (jftype)
4465 {
4466 case IPA_JF_UNKNOWN:
4467 ipa_set_jf_unknown (jump_func);
4468 break;
4469 case IPA_JF_CONST:
4470 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4471 break;
4472 case IPA_JF_PASS_THROUGH:
4473 operation = (enum tree_code) streamer_read_uhwi (ib);
4474 if (operation == NOP_EXPR)
4475 {
4476 int formal_id = streamer_read_uhwi (ib);
4477 struct bitpack_d bp = streamer_read_bitpack (ib);
4478 bool agg_preserved = bp_unpack_value (&bp, 1);
4479 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4480 }
4481 else
4482 {
4483 tree operand = stream_read_tree (ib, data_in);
4484 int formal_id = streamer_read_uhwi (ib);
4485 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4486 operation);
4487 }
4488 break;
4489 case IPA_JF_ANCESTOR:
4490 {
4491 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4492 int formal_id = streamer_read_uhwi (ib);
4493 struct bitpack_d bp = streamer_read_bitpack (ib);
4494 bool agg_preserved = bp_unpack_value (&bp, 1);
4495 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4496 break;
4497 }
4498 }
4499
4500 count = streamer_read_uhwi (ib);
4501 vec_alloc (jump_func->agg.items, count);
4502 if (count)
4503 {
4504 struct bitpack_d bp = streamer_read_bitpack (ib);
4505 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4506 }
4507 for (i = 0; i < count; i++)
4508 {
4509 struct ipa_agg_jf_item item;
4510 item.offset = streamer_read_uhwi (ib);
4511 item.value = stream_read_tree (ib, data_in);
4512 jump_func->agg.items->quick_push (item);
4513 }
4514
4515 struct bitpack_d bp = streamer_read_bitpack (ib);
4516 bool alignment_known = bp_unpack_value (&bp, 1);
4517 if (alignment_known)
4518 {
4519 jump_func->alignment.known = true;
4520 jump_func->alignment.align = streamer_read_uhwi (ib);
4521 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4522 }
4523 else
4524 jump_func->alignment.known = false;
4525 }
4526
/* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
   CS that are relevant to indirect inlining.  */
4529
4530 static void
4531 ipa_write_indirect_edge_info (struct output_block *ob,
4532 struct cgraph_edge *cs)
4533 {
4534 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4535 struct bitpack_d bp;
4536
4537 streamer_write_hwi (ob, ii->param_index);
4538 bp = bitpack_create (ob->main_stream);
4539 bp_pack_value (&bp, ii->polymorphic, 1);
4540 bp_pack_value (&bp, ii->agg_contents, 1);
4541 bp_pack_value (&bp, ii->member_ptr, 1);
4542 bp_pack_value (&bp, ii->by_ref, 1);
4543 bp_pack_value (&bp, ii->vptr_changed, 1);
4544 streamer_write_bitpack (&bp);
4545 if (ii->agg_contents || ii->polymorphic)
4546 streamer_write_hwi (ob, ii->offset);
4547 else
4548 gcc_assert (ii->offset == 0);
4549
4550 if (ii->polymorphic)
4551 {
4552 streamer_write_hwi (ob, ii->otr_token);
4553 stream_write_tree (ob, ii->otr_type, true);
4554 ii->context.stream_out (ob);
4555 }
4556 }
4557
/* Read in from IB the parts of cgraph_indirect_call_info corresponding to
   CS that are relevant to indirect inlining.  */
4560
4561 static void
4562 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4563 struct data_in *data_in,
4564 struct cgraph_edge *cs)
4565 {
4566 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4567 struct bitpack_d bp;
4568
4569 ii->param_index = (int) streamer_read_hwi (ib);
4570 bp = streamer_read_bitpack (ib);
4571 ii->polymorphic = bp_unpack_value (&bp, 1);
4572 ii->agg_contents = bp_unpack_value (&bp, 1);
4573 ii->member_ptr = bp_unpack_value (&bp, 1);
4574 ii->by_ref = bp_unpack_value (&bp, 1);
4575 ii->vptr_changed = bp_unpack_value (&bp, 1);
4576 if (ii->agg_contents || ii->polymorphic)
4577 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4578 else
4579 ii->offset = 0;
4580 if (ii->polymorphic)
4581 {
4582 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4583 ii->otr_type = stream_read_tree (ib, data_in);
4584 ii->context.stream_in (ib, data_in);
4585 }
4586 }
4587
4588 /* Stream out NODE info to OB. */
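/* Note the encoding used for edges below: we stream
   ipa_get_cs_argument_count (args) * 2, plus one when polymorphic call
   contexts are present.  The reader recovers the argument count by dividing
   by two, and the low bit tells it whether a context follows each jump
   function.  */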
4589
4590 static void
4591 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4592 {
4593 int node_ref;
4594 lto_symtab_encoder_t encoder;
4595 struct ipa_node_params *info = IPA_NODE_REF (node);
4596 int j;
4597 struct cgraph_edge *e;
4598 struct bitpack_d bp;
4599
4600 encoder = ob->decl_state->symtab_node_encoder;
4601 node_ref = lto_symtab_encoder_encode (encoder, node);
4602 streamer_write_uhwi (ob, node_ref);
4603
4604 streamer_write_uhwi (ob, ipa_get_param_count (info));
4605 for (j = 0; j < ipa_get_param_count (info); j++)
4606 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4607 bp = bitpack_create (ob->main_stream);
4608 gcc_assert (info->analysis_done
4609 || ipa_get_param_count (info) == 0);
4610 gcc_assert (!info->node_enqueued);
4611 gcc_assert (!info->ipcp_orig_node);
4612 for (j = 0; j < ipa_get_param_count (info); j++)
4613 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4614 streamer_write_bitpack (&bp);
4615 for (j = 0; j < ipa_get_param_count (info); j++)
4616 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4617 for (e = node->callees; e; e = e->next_callee)
4618 {
4619 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4620
4621 streamer_write_uhwi (ob,
4622 ipa_get_cs_argument_count (args) * 2
4623 + (args->polymorphic_call_contexts != NULL));
4624 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4625 {
4626 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4627 if (args->polymorphic_call_contexts != NULL)
4628 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4629 }
4630 }
4631 for (e = node->indirect_calls; e; e = e->next_callee)
4632 {
4633 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4634
4635 streamer_write_uhwi (ob,
4636 ipa_get_cs_argument_count (args) * 2
4637 + (args->polymorphic_call_contexts != NULL));
4638 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4639 {
4640 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4641 if (args->polymorphic_call_contexts != NULL)
4642 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4643 }
4644 ipa_write_indirect_edge_info (ob, e);
4645 }
4646 }
4647
4648 /* Stream in NODE info from IB. */
4649
4650 static void
4651 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4652 struct data_in *data_in)
4653 {
4654 struct ipa_node_params *info = IPA_NODE_REF (node);
4655 int k;
4656 struct cgraph_edge *e;
4657 struct bitpack_d bp;
4658
4659 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4660
4661 for (k = 0; k < ipa_get_param_count (info); k++)
4662 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4663
4664 bp = streamer_read_bitpack (ib);
4665 if (ipa_get_param_count (info) != 0)
4666 info->analysis_done = true;
4667 info->node_enqueued = false;
4668 for (k = 0; k < ipa_get_param_count (info); k++)
4669 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4670 for (k = 0; k < ipa_get_param_count (info); k++)
4671 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4672 for (e = node->callees; e; e = e->next_callee)
4673 {
4674 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4675 int count = streamer_read_uhwi (ib);
4676 bool contexts_computed = count & 1;
4677 count /= 2;
4678
4679 if (!count)
4680 continue;
4681 vec_safe_grow_cleared (args->jump_functions, count);
4682 if (contexts_computed)
4683 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4684
4685 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4686 {
4687 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4688 data_in);
4689 if (contexts_computed)
4690 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4691 }
4692 }
4693 for (e = node->indirect_calls; e; e = e->next_callee)
4694 {
4695 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4696 int count = streamer_read_uhwi (ib);
4697 bool contexts_computed = count & 1;
4698 count /= 2;
4699
4700 if (count)
4701 {
4702 vec_safe_grow_cleared (args->jump_functions, count);
4703 if (contexts_computed)
4704 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4705 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4706 {
4707 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4708 data_in);
4709 if (contexts_computed)
4710 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4711 }
4712 }
4713 ipa_read_indirect_edge_info (ib, data_in, e);
4714 }
4715 }
4716
/* Write jump functions of all functions in the current LTO partition.  */
4718
4719 void
4720 ipa_prop_write_jump_functions (void)
4721 {
4722 struct cgraph_node *node;
4723 struct output_block *ob;
4724 unsigned int count = 0;
4725 lto_symtab_encoder_iterator lsei;
4726 lto_symtab_encoder_t encoder;
4727
4728 if (!ipa_node_params_sum)
4729 return;
4730
4731 ob = create_output_block (LTO_section_jump_functions);
4732 encoder = ob->decl_state->symtab_node_encoder;
4733 ob->symbol = NULL;
4734 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4735 lsei_next_function_in_partition (&lsei))
4736 {
4737 node = lsei_cgraph_node (lsei);
4738 if (node->has_gimple_body_p ()
4739 && IPA_NODE_REF (node) != NULL)
4740 count++;
4741 }
4742
4743 streamer_write_uhwi (ob, count);
4744
4745 /* Process all of the functions. */
4746 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4747 lsei_next_function_in_partition (&lsei))
4748 {
4749 node = lsei_cgraph_node (lsei);
4750 if (node->has_gimple_body_p ()
4751 && IPA_NODE_REF (node) != NULL)
4752 ipa_write_node_info (ob, node);
4753 }
4754 streamer_write_char_stream (ob->main_stream, 0);
4755 produce_asm (ob, NULL);
4756 destroy_output_block (ob);
4757 }
4758
/* Read the jump function section in file FILE_DATA of length LEN with data
   DATA.  */
4760
4761 static void
4762 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4763 size_t len)
4764 {
4765 const struct lto_function_header *header =
4766 (const struct lto_function_header *) data;
4767 const int cfg_offset = sizeof (struct lto_function_header);
4768 const int main_offset = cfg_offset + header->cfg_size;
4769 const int string_offset = main_offset + header->main_size;
4770 struct data_in *data_in;
4771 unsigned int i;
4772 unsigned int count;
4773
4774 lto_input_block ib_main ((const char *) data + main_offset,
4775 header->main_size, file_data->mode_table);
4776
4777 data_in =
4778 lto_data_in_create (file_data, (const char *) data + string_offset,
4779 header->string_size, vNULL);
4780 count = streamer_read_uhwi (&ib_main);
4781
4782 for (i = 0; i < count; i++)
4783 {
4784 unsigned int index;
4785 struct cgraph_node *node;
4786 lto_symtab_encoder_t encoder;
4787
4788 index = streamer_read_uhwi (&ib_main);
4789 encoder = file_data->symtab_node_encoder;
4790 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4791 index));
4792 gcc_assert (node->definition);
4793 ipa_read_node_info (&ib_main, node, data_in);
4794 }
4795 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4796 len);
4797 lto_data_in_delete (data_in);
4798 }
4799
4800 /* Read ipcp jump functions. */
4801
4802 void
4803 ipa_prop_read_jump_functions (void)
4804 {
4805 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4806 struct lto_file_decl_data *file_data;
4807 unsigned int j = 0;
4808
4809 ipa_check_create_node_params ();
4810 ipa_check_create_edge_args ();
4811 ipa_register_cgraph_hooks ();
4812
4813 while ((file_data = file_data_vec[j++]))
4814 {
4815 size_t len;
4816 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4817
4818 if (data)
4819 ipa_prop_read_section (file_data, data, len);
4820 }
4821 }
4822
/* After merging units, we can get a mismatch in argument counts.  Decl
   merging might also have rendered parameter lists obsolete.  Also compute
   called_with_variable_arg info.  */
4826
4827 void
4828 ipa_update_after_lto_read (void)
4829 {
4830 ipa_check_create_node_params ();
4831 ipa_check_create_edge_args ();
4832 }
4833
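/* Stream out to OB the IPA-CP transformation summary of NODE: the chain of
   aggregate replacement values followed by the known parameter
   alignments.  */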
static void
4835 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4836 {
4837 int node_ref;
4838 unsigned int count = 0;
4839 lto_symtab_encoder_t encoder;
4840 struct ipa_agg_replacement_value *aggvals, *av;
4841
4842 aggvals = ipa_get_agg_replacements_for_node (node);
4843 encoder = ob->decl_state->symtab_node_encoder;
4844 node_ref = lto_symtab_encoder_encode (encoder, node);
4845 streamer_write_uhwi (ob, node_ref);
4846
4847 for (av = aggvals; av; av = av->next)
4848 count++;
4849 streamer_write_uhwi (ob, count);
4850
4851 for (av = aggvals; av; av = av->next)
4852 {
4853 struct bitpack_d bp;
4854
4855 streamer_write_uhwi (ob, av->offset);
4856 streamer_write_uhwi (ob, av->index);
4857 stream_write_tree (ob, av->value, true);
4858
4859 bp = bitpack_create (ob->main_stream);
4860 bp_pack_value (&bp, av->by_ref, 1);
4861 streamer_write_bitpack (&bp);
4862 }
4863
4864 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4865 if (ts && vec_safe_length (ts->alignments) > 0)
4866 {
4867 count = ts->alignments->length ();
4868
4869 streamer_write_uhwi (ob, count);
4870 for (unsigned i = 0; i < count; ++i)
4871 {
4872 ipa_alignment *parm_al = &(*ts->alignments)[i];
4873
4874 struct bitpack_d bp;
4875 bp = bitpack_create (ob->main_stream);
4876 bp_pack_value (&bp, parm_al->known, 1);
4877 streamer_write_bitpack (&bp);
4878 if (parm_al->known)
4879 {
4880 streamer_write_uhwi (ob, parm_al->align);
4881 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4882 parm_al->misalign);
4883 }
4884 }
4885 }
4886 else
4887 streamer_write_uhwi (ob, 0);
4888 }
4889
/* Stream in the aggregate value replacement chain and the known parameter
   alignments for NODE from IB.  */
4891
4892 static void
4893 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4894 data_in *data_in)
4895 {
4896 struct ipa_agg_replacement_value *aggvals = NULL;
4897 unsigned int count, i;
4898
4899 count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
4901 {
4902 struct ipa_agg_replacement_value *av;
4903 struct bitpack_d bp;
4904
4905 av = ggc_alloc<ipa_agg_replacement_value> ();
4906 av->offset = streamer_read_uhwi (ib);
4907 av->index = streamer_read_uhwi (ib);
4908 av->value = stream_read_tree (ib, data_in);
4909 bp = streamer_read_bitpack (ib);
4910 av->by_ref = bp_unpack_value (&bp, 1);
4911 av->next = aggvals;
4912 aggvals = av;
4913 }
4914 ipa_set_node_agg_value_chain (node, aggvals);
4915
4916 count = streamer_read_uhwi (ib);
4917 if (count > 0)
4918 {
4919 ipcp_grow_transformations_if_necessary ();
4920
4921 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4922 vec_safe_grow_cleared (ts->alignments, count);
4923
4924 for (i = 0; i < count; i++)
4925 {
4926 ipa_alignment *parm_al;
4927 parm_al = &(*ts->alignments)[i];
4928 struct bitpack_d bp;
4929 bp = streamer_read_bitpack (ib);
4930 parm_al->known = bp_unpack_value (&bp, 1);
4931 if (parm_al->known)
4932 {
4933 parm_al->align = streamer_read_uhwi (ib);
4934 parm_al->misalign
4935 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
4936 0, parm_al->align);
4937 }
4938 }
4939 }
4940 }
4941
/* Write the IPA-CP transformation summaries of all functions in the current
   LTO partition.  */
4943
4944 void
4945 ipcp_write_transformation_summaries (void)
4946 {
4947 struct cgraph_node *node;
4948 struct output_block *ob;
4949 unsigned int count = 0;
4950 lto_symtab_encoder_iterator lsei;
4951 lto_symtab_encoder_t encoder;
4952
4953 ob = create_output_block (LTO_section_ipcp_transform);
4954 encoder = ob->decl_state->symtab_node_encoder;
4955 ob->symbol = NULL;
4956 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4957 lsei_next_function_in_partition (&lsei))
4958 {
4959 node = lsei_cgraph_node (lsei);
4960 if (node->has_gimple_body_p ())
4961 count++;
4962 }
4963
4964 streamer_write_uhwi (ob, count);
4965
4966 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4967 lsei_next_function_in_partition (&lsei))
4968 {
4969 node = lsei_cgraph_node (lsei);
4970 if (node->has_gimple_body_p ())
4971 write_ipcp_transformation_info (ob, node);
4972 }
4973 streamer_write_char_stream (ob->main_stream, 0);
4974 produce_asm (ob, NULL);
4975 destroy_output_block (ob);
4976 }
4977
4978 /* Read replacements section in file FILE_DATA of length LEN with data
4979 DATA. */
4980
4981 static void
4982 read_replacements_section (struct lto_file_decl_data *file_data,
4983 const char *data,
4984 size_t len)
4985 {
4986 const struct lto_function_header *header =
4987 (const struct lto_function_header *) data;
4988 const int cfg_offset = sizeof (struct lto_function_header);
4989 const int main_offset = cfg_offset + header->cfg_size;
4990 const int string_offset = main_offset + header->main_size;
4991 struct data_in *data_in;
4992 unsigned int i;
4993 unsigned int count;
4994
4995 lto_input_block ib_main ((const char *) data + main_offset,
4996 header->main_size, file_data->mode_table);
4997
4998 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4999 header->string_size, vNULL);
5000 count = streamer_read_uhwi (&ib_main);
5001
5002 for (i = 0; i < count; i++)
5003 {
5004 unsigned int index;
5005 struct cgraph_node *node;
5006 lto_symtab_encoder_t encoder;
5007
5008 index = streamer_read_uhwi (&ib_main);
5009 encoder = file_data->symtab_node_encoder;
5010 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5011 index));
5012 gcc_assert (node->definition);
5013 read_ipcp_transformation_info (&ib_main, node, data_in);
5014 }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
                         len);
5017 lto_data_in_delete (data_in);
5018 }
5019
/* Read the IPA-CP transformation summaries.  */
5021
5022 void
5023 ipcp_read_transformation_summaries (void)
5024 {
5025 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5026 struct lto_file_decl_data *file_data;
5027 unsigned int j = 0;
5028
5029 while ((file_data = file_data_vec[j++]))
5030 {
5031 size_t len;
5032 const char *data = lto_get_section_data (file_data,
5033 LTO_section_ipcp_transform,
5034 NULL, &len);
5035 if (data)
5036 read_replacements_section (file_data, data, len);
5037 }
5038 }
5039
5040 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5041 NODE. */
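/* For example, if the clone's combined_args_to_skip bitmap contains only
   bit 1, replacement indices 0, 2 and 3 are remapped to 0, 1 and 2
   respectively, matching the clone's shortened parameter list.  */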
5042
5043 static void
5044 adjust_agg_replacement_values (struct cgraph_node *node,
5045 struct ipa_agg_replacement_value *aggval)
5046 {
5047 struct ipa_agg_replacement_value *v;
5048 int i, c = 0, d = 0, *adj;
5049
5050 if (!node->clone.combined_args_to_skip)
5051 return;
5052
5053 for (v = aggval; v; v = v->next)
5054 {
5055 gcc_assert (v->index >= 0);
5056 if (c < v->index)
5057 c = v->index;
5058 }
5059 c++;
5060
5061 adj = XALLOCAVEC (int, c);
5062 for (i = 0; i < c; i++)
5063 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5064 {
5065 adj[i] = -1;
5066 d++;
5067 }
5068 else
5069 adj[i] = i - d;
5070
5071 for (v = aggval; v; v = v->next)
5072 v->index = adj[v->index];
5073 }
5074
5075 /* Dominator walker driving the ipcp modification phase. */
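/* As an illustration (hypothetical names): if the replacement chain records
   that the piece of parameter 0 at offset 32 has the constant value 7,
   before_dom_children rewrites a dominated aggregate load such as
   tmp_1 = param_0->fld into tmp_1 = 7, converting the constant to the type
   of the right-hand side when necessary.  */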
5076
5077 class ipcp_modif_dom_walker : public dom_walker
5078 {
5079 public:
5080 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5081 vec<ipa_param_descriptor> descs,
5082 struct ipa_agg_replacement_value *av,
5083 bool *sc, bool *cc)
5084 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5085 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5086
5087 virtual void before_dom_children (basic_block);
5088
5089 private:
5090 struct ipa_func_body_info *m_fbi;
5091 vec<ipa_param_descriptor> m_descriptors;
5092 struct ipa_agg_replacement_value *m_aggval;
5093 bool *m_something_changed, *m_cfg_changed;
5094 };
5095
5096 void
5097 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5098 {
5099 gimple_stmt_iterator gsi;
5100 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5101 {
5102 struct ipa_agg_replacement_value *v;
5103 gimple stmt = gsi_stmt (gsi);
5104 tree rhs, val, t;
5105 HOST_WIDE_INT offset, size;
5106 int index;
5107 bool by_ref, vce;
5108
5109 if (!gimple_assign_load_p (stmt))
5110 continue;
5111 rhs = gimple_assign_rhs1 (stmt);
5112 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5113 continue;
5114
5115 vce = false;
5116 t = rhs;
5117 while (handled_component_p (t))
5118 {
          /* V_C_E can do things like convert an array of integers into one
             bigger integer or do other things that we do not handle
             below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5122 {
5123 vce = true;
5124 break;
5125 }
5126 t = TREE_OPERAND (t, 0);
5127 }
5128 if (vce)
5129 continue;
5130
5131 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5132 &offset, &size, &by_ref))
5133 continue;
5134 for (v = m_aggval; v; v = v->next)
5135 if (v->index == index
5136 && v->offset == offset)
5137 break;
5138 if (!v
5139 || v->by_ref != by_ref
5140 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5141 continue;
5142
5143 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5144 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5145 {
5146 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5147 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5148 else if (TYPE_SIZE (TREE_TYPE (rhs))
5149 == TYPE_SIZE (TREE_TYPE (v->value)))
5150 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5151 else
5152 {
5153 if (dump_file)
5154 {
5155 fprintf (dump_file, " const ");
5156 print_generic_expr (dump_file, v->value, 0);
5157 fprintf (dump_file, " can't be converted to type of ");
5158 print_generic_expr (dump_file, rhs, 0);
5159 fprintf (dump_file, "\n");
5160 }
5161 continue;
5162 }
5163 }
5164 else
5165 val = v->value;
5166
5167 if (dump_file && (dump_flags & TDF_DETAILS))
5168 {
5169 fprintf (dump_file, "Modifying stmt:\n ");
5170 print_gimple_stmt (dump_file, stmt, 0, 0);
5171 }
5172 gimple_assign_set_rhs_from_tree (&gsi, val);
5173 update_stmt (stmt);
5174
5175 if (dump_file && (dump_flags & TDF_DETAILS))
5176 {
5177 fprintf (dump_file, "into:\n ");
5178 print_gimple_stmt (dump_file, stmt, 0, 0);
5179 fprintf (dump_file, "\n");
5180 }
5181
5182 *m_something_changed = true;
5183 if (maybe_clean_eh_stmt (stmt)
5184 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5185 *m_cfg_changed = true;
5186 }
5187
5188 }
5189
/* Update the alignments of the formal parameters of NODE as described in
   its ipcp_transformation_summary.  */
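/* For instance, if the summary records that a pointer parameter is known to
   be aligned to 16 bytes with misalignment 4, the ptr_info of the
   parameter's SSA default definition is updated accordingly, unless an
   equal or greater alignment has already been derived.  */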
5192
5193 static void
5194 ipcp_update_alignments (struct cgraph_node *node)
5195 {
5196 tree fndecl = node->decl;
5197 tree parm = DECL_ARGUMENTS (fndecl);
5198 tree next_parm = parm;
5199 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5200 if (!ts || vec_safe_length (ts->alignments) == 0)
5201 return;
5202 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5203 unsigned count = alignments.length ();
5204
5205 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5206 {
5207 if (node->clone.combined_args_to_skip
5208 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5209 continue;
5210 gcc_checking_assert (parm);
5211 next_parm = DECL_CHAIN (parm);
5212
5213 if (!alignments[i].known || !is_gimple_reg (parm))
5214 continue;
5215 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5216 if (!ddef)
5217 continue;
5218
5219 if (dump_file)
5220 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5221 "misalignment to %u\n", i, alignments[i].align,
5222 alignments[i].misalign);
5223
5224 struct ptr_info_def *pi = get_ptr_info (ddef);
5225 gcc_checking_assert (pi);
5226 unsigned old_align;
5227 unsigned old_misalign;
5228 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5229
5230 if (old_known
5231 && old_align >= alignments[i].align)
5232 {
5233 if (dump_file)
5234 fprintf (dump_file, " But the alignment was already %u.\n",
5235 old_align);
5236 continue;
5237 }
5238 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5239 }
5240 }
5241
/* The IPCP transformation phase: apply the parameter alignments and
   aggregate value replacements accumulated for NODE to its function
   body.  */
5243
5244 unsigned int
5245 ipcp_transform_function (struct cgraph_node *node)
5246 {
5247 vec<ipa_param_descriptor> descriptors = vNULL;
5248 struct ipa_func_body_info fbi;
5249 struct ipa_agg_replacement_value *aggval;
5250 int param_count;
5251 bool cfg_changed = false, something_changed = false;
5252
5253 gcc_checking_assert (cfun);
5254 gcc_checking_assert (current_function_decl);
5255
5256 if (dump_file)
5257 fprintf (dump_file, "Modification phase of node %s/%i\n",
5258 node->name (), node->order);
5259
5260 ipcp_update_alignments (node);
5261 aggval = ipa_get_agg_replacements_for_node (node);
5262 if (!aggval)
5263 return 0;
5264 param_count = count_formal_params (node->decl);
5265 if (param_count == 0)
5266 return 0;
5267 adjust_agg_replacement_values (node, aggval);
5268 if (dump_file)
5269 ipa_dump_agg_replacement_values (dump_file, aggval);
5270
5271 fbi.node = node;
5272 fbi.info = NULL;
5273 fbi.bb_infos = vNULL;
5274 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5275 fbi.param_count = param_count;
5276 fbi.aa_walked = 0;
5277
5278 descriptors.safe_grow_cleared (param_count);
5279 ipa_populate_param_decls (node, descriptors);
5280 calculate_dominance_info (CDI_DOMINATORS);
5281 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5282 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5283
5284 int i;
5285 struct ipa_bb_info *bi;
5286 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5287 free_ipa_bb_info (bi);
5288 fbi.bb_infos.release ();
5289 free_dominance_info (CDI_DOMINATORS);
5290 (*ipcp_transformations)[node->uid].agg_values = NULL;
5291 (*ipcp_transformations)[node->uid].alignments = NULL;
5292 descriptors.release ();
5293
5294 if (!something_changed)
5295 return 0;
5296 else if (cfg_changed)
5297 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5298 else
5299 return TODO_update_ssa_only_virtuals;
5300 }