gcc/ipa-prop.c
/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of
   IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
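
/* For example (illustrative), a function defined as

     int __attribute__ ((optimize ("O0"))) foo (int i) { return i; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with the optimization level
   set to zero, so the predicate above returns true and IPA-CP leaves the
   function alone.  */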

/* Return the index of the formal parameter whose decl is PTREE in the vector
   of parameter DESCRIPTORS, or -1 if it is not found there.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return the index of the formal parameter whose decl is PTREE in the function
   which corresponds to INFO, or -1 if it is not found there.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in the parameter DESCRIPTORS corresponding
   to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
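
/* For instance, for "void foo (int a, char *b)" dumping parameter 1 would
   print "param #1 b" (illustrative output).  */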

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, " Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, " Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, " Unknown alignment\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function carrying no information about the actual
   argument (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another constant jump function SRC (to be used by
   the jump function combination code).  The two jump functions will share
   their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
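
/* For instance (illustrative), in a call such as

     qsort (buf, n, sizeof (int), compare);

   the last argument is an ADDR_EXPR of the FUNCTION_DECL compare, so besides
   recording the constant itself the code above allocates a reference
   description with refcount 1, which lets the reference the call statement
   took on compare be tracked as call graph edges are cloned or removed.  */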

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
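
/* E.g. in

     void foo (int a) { bar (a); }

   the argument passed to bar is described by a simple pass-through jump
   function with formal_id 0, operation NOP_EXPR and no operand
   (illustrative example).  */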

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
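
/* For illustration, in a constructor a statement such as

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   stores into a DECL_VIRTUAL_P field and is therefore treated as a potential
   VMT pointer store by the function above, whereas a plain scalar store like
   "this_2(D)->i = 5;" is weeded out by the pointer-type and DECL_VIRTUAL_P
   checks.  (Illustrative GIMPLE.)  */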

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point where CALL
   is invoked, and false if it cannot.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     such changes would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy way to verify that, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
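
/* As an example (illustrative), for

     int foo (struct A *a) { return bar (a); }

   where foo is neither pure nor a method, a is the default-definition
   SSA name of the first PARM_DECL, so provided the call to bar is not
   enclosed in an inlined ctor/dtor block, the function above returns false
   and the dynamic type of *a can be relied upon up to that call.  */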

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information gathered about the function so far,
   which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the value was
   not modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
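
/* For instance, when an addressable parameter a is used, the IL contains

     a.0_2 = a;

   a single assignment whose RHS is the PARM_DECL a; if no aliased store
   modifies a between the start of the function and this statement, the
   function above returns the index of a in DESCRIPTORS.  (Illustrative
   GIMPLE.)  */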

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI holds intermediate information gathered about the function body (it can
   be NULL), DESCRIPTORS describe the parameters of the current function, and
   STMT is the load statement.  If the function returns true, *INDEX_P,
   *OFFSET_P and *BY_REF_P are filled with the parameter index, offset within
   the aggregate and whether it is a load from a value passed by reference
   respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters, used across
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
       iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
       # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
       D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
       return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
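
/* The shape matched above corresponds to the usual ABI representation of a
   C++ pointer to member function, roughly:

     struct
     {
       void (S::*__pfn) ();     function pointer or vtable offset
       ptrdiff_t __delta;       adjustment of the THIS pointer
     };

   (Illustrative layout; the exact field types come from the front end.)  */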

/* If RHS is an SSA_NAME defined by a simple copy assign statement, follow the
   chain of such copies and return the RHS of the first non-copy defining
   statement.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
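
/* For example, given the copies

     b_3 = 5;
     c_4 = b_3;

   calling the function above on c_4 follows the chain and returns the
   constant 5.  (Illustrative GIMPLE.)  */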

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
   Return NULL if there is a partial overlap; if such an element is already
   there, set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
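
/* As an illustration, for a caller like

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk in the second stage records the stores to s.a and s.b,
   and the resulting aggregate jump function holds two constant items with
   values 1 and 2 at the respective field offsets, passed by reference.
   (Illustrative example.)  */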

/* Return the declared type of the Ith formal parameter of the callee of edge
   E, taken from the TYPE_ARG_TYPES of the callee decl or of the call fntype,
   or failing that from the callee's DECL_ARGUMENTS.  Return NULL if the type
   cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
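
/* For a variadic callee such as "void foo (int n, ...)", TYPE_ARG_TYPES runs
   out after the first argument and there are no further DECL_ARGUMENTS, so
   for i >= 1 the function above returns NULL and its caller falls back to the
   type of the actual argument.  (Illustrative.)  */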

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     loop.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in
	 gimple).  Usually we can safely get the type from the function
	 declaration, but in case of K&R prototypes or variadic functions we
	 can try our luck with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
							         : NULL_TREE;
}
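
/* E.g. both of the following loads from a member-pointer parameter f match,
   the first through the ref_field branch and the second through the
   comparison with the byte position of the field (assuming __pfn lives at
   offset 4B in the record):

     f$__pfn_24 = MEM[(struct  *)&f].__pfn;
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   With USE_DELTA false, both return f.  (Illustrative GIMPLE.)  */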
1799
1800 /* Returns true iff T is an SSA_NAME defined by a statement. */
1801
1802 static bool
1803 ipa_is_ssa_with_stmt_def (tree t)
1804 {
1805 if (TREE_CODE (t) == SSA_NAME
1806 && !SSA_NAME_IS_DEFAULT_DEF (t))
1807 return true;
1808 else
1809 return false;
1810 }
1811
1812 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1813 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1814 indirect call graph edge. */
1815
1816 static struct cgraph_edge *
1817 ipa_note_param_call (struct cgraph_node *node, int param_index,
1818 gcall *stmt)
1819 {
1820 struct cgraph_edge *cs;
1821
1822 cs = node->get_edge (stmt);
1823 cs->indirect_info->param_index = param_index;
1824 cs->indirect_info->agg_contents = 0;
1825 cs->indirect_info->member_ptr = 0;
1826 return cs;
1827 }
1828
1829 /* Analyze the CALL and examine uses of formal parameters of the caller
1830 FBI->node (described by FBI->info), which also holds intermediate
1831 information about each formal parameter. Currently it checks
1832 whether the call calls a pointer that is a formal parameter and if so, the
1833 parameter is marked with the called flag and an indirect call graph edge
1834 describing the call is created. This is very simple for ordinary pointers
1835 represented in SSA but not-so-nice when it comes to member pointers. The
1836 ugly part of this function does nothing more than trying to match the
1837 pattern of such a call. An example of such a pattern is the gimple dump
1838 below, the call is on the last line:
1839
1840 <bb 2>:
1841 f$__delta_5 = f.__delta;
1842 f$__pfn_24 = f.__pfn;
1843
1844 or
1845 <bb 2>:
1846 f$__delta_5 = MEM[(struct *)&f];
1847 f$__pfn_24 = MEM[(struct *)&f + 4B];
1848
1849 and a few lines below:
1850
1851 <bb 5>:
1852 D.2496_3 = (int) f$__pfn_24;
1853 D.2497_4 = D.2496_3 & 1;
1854 if (D.2497_4 != 0)
1855 goto <bb 3>;
1856 else
1857 goto <bb 4>;
1858
1859 <bb 6>:
1860 D.2500_7 = (unsigned int) f$__delta_5;
1861 D.2501_8 = &S + D.2500_7;
1862 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1863 D.2503_10 = *D.2502_9;
1864 D.2504_12 = f$__pfn_24 + -1;
1865 D.2505_13 = (unsigned int) D.2504_12;
1866 D.2506_14 = D.2503_10 + D.2505_13;
1867 D.2507_15 = *D.2506_14;
1868 iftmp.11_16 = (String:: *) D.2507_15;
1869
1870 <bb 7>:
1871 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1872 D.2500_19 = (unsigned int) f$__delta_5;
1873 D.2508_20 = &S + D.2500_19;
1874 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1875
1876 Such patterns are results of simple calls to a member pointer:
1877
1878 int doprinting (int (MyString::* f)(int) const)
1879 {
1880 MyString S ("somestring");
1881
1882 return (S.*f)(4);
1883 }
1884
1885 Moreover, the function looks for called pointers loaded from aggregates
1886 passed by value or by reference. */
1887
1888 static void
1889 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
1890 tree target)
1891 {
1892 struct ipa_node_params *info = fbi->info;
1893 HOST_WIDE_INT offset;
1894 bool by_ref;
1895
1896 if (SSA_NAME_IS_DEFAULT_DEF (target))
1897 {
1898 tree var = SSA_NAME_VAR (target);
1899 int index = ipa_get_param_decl_index (info, var);
1900 if (index >= 0)
1901 ipa_note_param_call (fbi->node, index, call);
1902 return;
1903 }
1904
1905 int index;
1906 gimple *def = SSA_NAME_DEF_STMT (target);
1907 if (gimple_assign_single_p (def)
1908 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
1909 gimple_assign_rhs1 (def), &index, &offset,
1910 NULL, &by_ref))
1911 {
1912 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1913 cs->indirect_info->offset = offset;
1914 cs->indirect_info->agg_contents = 1;
1915 cs->indirect_info->by_ref = by_ref;
1916 return;
1917 }
1918
1919 /* Now we need to try to match the complex pattern of calling a member
1920 pointer. */
1921 if (gimple_code (def) != GIMPLE_PHI
1922 || gimple_phi_num_args (def) != 2
1923 || !POINTER_TYPE_P (TREE_TYPE (target))
1924 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1925 return;
1926
1927 /* First, we need to check whether one of these is a load from a member
1928 pointer that is a parameter to this function. */
1929 tree n1 = PHI_ARG_DEF (def, 0);
1930 tree n2 = PHI_ARG_DEF (def, 1);
1931 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1932 return;
1933 gimple *d1 = SSA_NAME_DEF_STMT (n1);
1934 gimple *d2 = SSA_NAME_DEF_STMT (n2);
1935
1936 tree rec;
1937 basic_block bb, virt_bb;
1938 basic_block join = gimple_bb (def);
1939 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
1940 {
1941 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
1942 return;
1943
1944 bb = EDGE_PRED (join, 0)->src;
1945 virt_bb = gimple_bb (d2);
1946 }
1947 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
1948 {
1949 bb = EDGE_PRED (join, 1)->src;
1950 virt_bb = gimple_bb (d1);
1951 }
1952 else
1953 return;
1954
1955 /* Second, we need to check that the basic blocks are laid out in the way
1956 corresponding to the pattern. */
1957
1958 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1959 || single_pred (virt_bb) != bb
1960 || single_succ (virt_bb) != join)
1961 return;
1962
1963 /* Third, let's see that the branching is done depending on the least
1964 significant bit of the pfn. */
1965
1966 gimple *branch = last_stmt (bb);
1967 if (!branch || gimple_code (branch) != GIMPLE_COND)
1968 return;
1969
1970 if ((gimple_cond_code (branch) != NE_EXPR
1971 && gimple_cond_code (branch) != EQ_EXPR)
1972 || !integer_zerop (gimple_cond_rhs (branch)))
1973 return;
1974
1975 tree cond = gimple_cond_lhs (branch);
1976 if (!ipa_is_ssa_with_stmt_def (cond))
1977 return;
1978
1979 def = SSA_NAME_DEF_STMT (cond);
1980 if (!is_gimple_assign (def)
1981 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1982 || !integer_onep (gimple_assign_rhs2 (def)))
1983 return;
1984
1985 cond = gimple_assign_rhs1 (def);
1986 if (!ipa_is_ssa_with_stmt_def (cond))
1987 return;
1988
1989 def = SSA_NAME_DEF_STMT (cond);
1990
1991 if (is_gimple_assign (def)
1992 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
1993 {
1994 cond = gimple_assign_rhs1 (def);
1995 if (!ipa_is_ssa_with_stmt_def (cond))
1996 return;
1997 def = SSA_NAME_DEF_STMT (cond);
1998 }
1999
2000 tree rec2;
2001 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2002 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2003 == ptrmemfunc_vbit_in_delta),
2004 NULL);
2005 if (rec != rec2)
2006 return;
2007
2008 index = ipa_get_param_decl_index (info, rec);
2009 if (index >= 0
2010 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2011 {
2012 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2013 cs->indirect_info->offset = offset;
2014 cs->indirect_info->agg_contents = 1;
2015 cs->indirect_info->member_ptr = 1;
2016 }
2017
2018 return;
2019 }
2020
2021 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2022 object referenced in the expression is a formal parameter of the caller
2023 FBI->node (described by FBI->info), create a call note for the
2024 statement. */
2025
2026 static void
2027 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2028 gcall *call, tree target)
2029 {
2030 tree obj = OBJ_TYPE_REF_OBJECT (target);
2031 int index;
2032 HOST_WIDE_INT anc_offset;
2033
2034 if (!flag_devirtualize)
2035 return;
2036
2037 if (TREE_CODE (obj) != SSA_NAME)
2038 return;
2039
2040 struct ipa_node_params *info = fbi->info;
2041 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2042 {
2043 struct ipa_jump_func jfunc;
2044 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2045 return;
2046
2047 anc_offset = 0;
2048 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2049 gcc_assert (index >= 0);
2050 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2051 call, &jfunc))
2052 return;
2053 }
2054 else
2055 {
2056 struct ipa_jump_func jfunc;
2057 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2058 tree expr;
2059
2060 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2061 if (!expr)
2062 return;
2063 index = ipa_get_param_decl_index (info,
2064 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2065 gcc_assert (index >= 0);
2066 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2067 call, &jfunc, anc_offset))
2068 return;
2069 }
2070
2071 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2072 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2073 ii->offset = anc_offset;
2074 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2075 ii->otr_type = obj_type_ref_class (target);
2076 ii->polymorphic = 1;
2077 }
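
/* A minimal hypothetical example of a call handled above:

struct A { virtual int foo (int); };

int dispatch (struct A *a)
{
return a->foo (4);
}

The OBJ_TYPE_REF object is the default definition of parameter a, so an
indirect call note with param_index 0, zero offset and the OTR type and
token of A::foo is attached to the edge, allowing later devirtualization
when a known type or context is propagated into dispatch.  */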
2078
2079 /* Analyze call statement CALL, determining whether and how it utilizes
2080 formal parameters of the caller FBI->node (described by FBI->info), and
2081 create indirect call notes where appropriate. */
2082
2083 static void
2084 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2085 {
2086 tree target = gimple_call_fn (call);
2087
2088 if (!target
2089 || (TREE_CODE (target) != SSA_NAME
2090 && !virtual_method_call_p (target)))
2091 return;
2092
2093 struct cgraph_edge *cs = fbi->node->get_edge (call);
2094 /* If we previously turned the call into a direct call, there is
2095 no need to analyze it again. */
2096 if (cs && !cs->indirect_unknown_callee)
2097 return;
2098
2099 if (cs->indirect_info->polymorphic && flag_devirtualize)
2100 {
2101 tree instance;
2102 tree target = gimple_call_fn (call);
2103 ipa_polymorphic_call_context context (current_function_decl,
2104 target, call, &instance);
2105
2106 gcc_checking_assert (cs->indirect_info->otr_type
2107 == obj_type_ref_class (target));
2108 gcc_checking_assert (cs->indirect_info->otr_token
2109 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2110
2111 cs->indirect_info->vptr_changed
2112 = !context.get_dynamic_type (instance,
2113 OBJ_TYPE_REF_OBJECT (target),
2114 obj_type_ref_class (target), call);
2115 cs->indirect_info->context = context;
2116 }
2117
2118 if (TREE_CODE (target) == SSA_NAME)
2119 ipa_analyze_indirect_call_uses (fbi, call, target);
2120 else if (virtual_method_call_p (target))
2121 ipa_analyze_virtual_call_uses (fbi, call, target);
2122 }
2123
2124
2125 /* Analyze the call statement STMT with respect to formal parameters (described
2126 in FBI->info) of the caller given by FBI->node. Currently it only checks
2127 whether formal parameters are called. */
2128
2129 static void
2130 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2131 {
2132 if (is_gimple_call (stmt))
2133 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2134 }
2135
2136 /* Callback of walk_stmt_load_store_addr_ops, used as the visit_load,
2137 visit_store and visit_addr hook alike. If OP is a parameter declaration,
2138 mark it as used in the info structure passed in DATA. */
2139
2140 static bool
2141 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2142 {
2143 struct ipa_node_params *info = (struct ipa_node_params *) data;
2144
2145 op = get_base_address (op);
2146 if (op
2147 && TREE_CODE (op) == PARM_DECL)
2148 {
2149 int index = ipa_get_param_decl_index (info, op);
2150 gcc_assert (index >= 0);
2151 ipa_set_param_used (info, index, true);
2152 }
2153
2154 return false;
2155 }
2156
2157 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2158 the findings in various fields of the associated ipa_node_params
2159 structure, such as parameter flags and notes. FBI holds various data about
2160 the function being analyzed. */
2161
2162 static void
2163 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2164 {
2165 gimple_stmt_iterator gsi;
2166 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2167 {
2168 gimple *stmt = gsi_stmt (gsi);
2169
2170 if (is_gimple_debug (stmt))
2171 continue;
2172
2173 ipa_analyze_stmt_uses (fbi, stmt);
2174 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2175 visit_ref_for_mod_analysis,
2176 visit_ref_for_mod_analysis,
2177 visit_ref_for_mod_analysis);
2178 }
2179 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2180 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2181 visit_ref_for_mod_analysis,
2182 visit_ref_for_mod_analysis,
2183 visit_ref_for_mod_analysis);
2184 }
2185
2186 /* Calculate controlled uses of parameters of NODE. */
2187
2188 static void
2189 ipa_analyze_controlled_uses (struct cgraph_node *node)
2190 {
2191 struct ipa_node_params *info = IPA_NODE_REF (node);
2192
2193 for (int i = 0; i < ipa_get_param_count (info); i++)
2194 {
2195 tree parm = ipa_get_param (info, i);
2196 int controlled_uses = 0;
2197
2198 /* For SSA regs, see if the parameter is used; for non-SSA parameters
2199 we compute the flag during modification analysis. */
2200 if (is_gimple_reg (parm))
2201 {
2202 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2203 parm);
2204 if (ddef && !has_zero_uses (ddef))
2205 {
2206 imm_use_iterator imm_iter;
2207 use_operand_p use_p;
2208
2209 ipa_set_param_used (info, i, true);
2210 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2211 if (!is_gimple_call (USE_STMT (use_p)))
2212 {
2213 if (!is_gimple_debug (USE_STMT (use_p)))
2214 {
2215 controlled_uses = IPA_UNDESCRIBED_USE;
2216 break;
2217 }
2218 }
2219 else
2220 controlled_uses++;
2221 }
2222 else
2223 controlled_uses = 0;
2224 }
2225 else
2226 controlled_uses = IPA_UNDESCRIBED_USE;
2227 ipa_set_controlled_uses (info, i, controlled_uses);
2228 }
2229 }
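
/* A hypothetical example of the counting above:

static void callee (int *p);

static void caller (int *p, int *q)
{
callee (p);
*q = 1;
}

Parameter p has a single use and it is a call argument, so its
controlled-uses count is 1. Parameter q has a non-call use (the store
through it), so it is marked IPA_UNDESCRIBED_USE.  */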
2230
2231 /* Free the memory allocated in BI. */
2232
2233 static void
2234 free_ipa_bb_info (struct ipa_bb_info *bi)
2235 {
2236 bi->cg_edges.release ();
2237 bi->param_aa_statuses.release ();
2238 }
2239
2240 /* Dominator walker driving the analysis. */
2241
2242 class analysis_dom_walker : public dom_walker
2243 {
2244 public:
2245 analysis_dom_walker (struct ipa_func_body_info *fbi)
2246 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2247
2248 virtual void before_dom_children (basic_block);
2249
2250 private:
2251 struct ipa_func_body_info *m_fbi;
2252 };
2253
2254 void
2255 analysis_dom_walker::before_dom_children (basic_block bb)
2256 {
2257 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2258 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2259 }
2260
2261 /* Release body info FBI. */
2262
2263 void
2264 ipa_release_body_info (struct ipa_func_body_info *fbi)
2265 {
2266 int i;
2267 struct ipa_bb_info *bi;
2268
2269 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2270 free_ipa_bb_info (bi);
2271 fbi->bb_infos.release ();
2272 }
2273
2274 /* Initialize the array describing properties of formal parameters
2275 of NODE, analyze their uses and compute jump functions associated
2276 with actual arguments of calls from within NODE. */
2277
2278 void
2279 ipa_analyze_node (struct cgraph_node *node)
2280 {
2281 struct ipa_func_body_info fbi;
2282 struct ipa_node_params *info;
2283
2284 ipa_check_create_node_params ();
2285 ipa_check_create_edge_args ();
2286 info = IPA_NODE_REF (node);
2287
2288 if (info->analysis_done)
2289 return;
2290 info->analysis_done = 1;
2291
2292 if (ipa_func_spec_opts_forbid_analysis_p (node))
2293 {
2294 for (int i = 0; i < ipa_get_param_count (info); i++)
2295 {
2296 ipa_set_param_used (info, i, true);
2297 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2298 }
2299 return;
2300 }
2301
2302 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2303 push_cfun (func);
2304 calculate_dominance_info (CDI_DOMINATORS);
2305 ipa_initialize_node_params (node);
2306 ipa_analyze_controlled_uses (node);
2307
2308 fbi.node = node;
2309 fbi.info = IPA_NODE_REF (node);
2310 fbi.bb_infos = vNULL;
2311 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2312 fbi.param_count = ipa_get_param_count (info);
2313 fbi.aa_walked = 0;
2314
2315 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2316 {
2317 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2318 bi->cg_edges.safe_push (cs);
2319 }
2320
2321 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2322 {
2323 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2324 bi->cg_edges.safe_push (cs);
2325 }
2326
2327 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2328
2329 ipa_release_body_info (&fbi);
2330 free_dominance_info (CDI_DOMINATORS);
2331 pop_cfun ();
2332 }
2333
2334 /* Update the jump functions associated with call graph edge E when the call
2335 graph edge CS is being inlined, assuming that E->caller is already (possibly
2336 indirectly) inlined into CS->callee and that E has not been inlined. */
2337
2338 static void
2339 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2340 struct cgraph_edge *e)
2341 {
2342 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2343 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2344 int count = ipa_get_cs_argument_count (args);
2345 int i;
2346
2347 for (i = 0; i < count; i++)
2348 {
2349 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2350 struct ipa_polymorphic_call_context *dst_ctx
2351 = ipa_get_ith_polymorhic_call_context (args, i);
2352
2353 if (dst->type == IPA_JF_ANCESTOR)
2354 {
2355 struct ipa_jump_func *src;
2356 int dst_fid = dst->value.ancestor.formal_id;
2357 struct ipa_polymorphic_call_context *src_ctx
2358 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2359
2360 /* A variable number of arguments can cause havoc if we try to access
2361 an argument that does not exist on the inlined edge. So make sure
2362 we don't. */
2363 if (dst_fid >= ipa_get_cs_argument_count (top))
2364 {
2365 ipa_set_jf_unknown (dst);
2366 continue;
2367 }
2368
2369 src = ipa_get_ith_jump_func (top, dst_fid);
2370
2371 if (src_ctx && !src_ctx->useless_p ())
2372 {
2373 struct ipa_polymorphic_call_context ctx = *src_ctx;
2374
2375 /* TODO: Make type preserved safe WRT contexts. */
2376 if (!ipa_get_jf_ancestor_type_preserved (dst))
2377 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2378 ctx.offset_by (dst->value.ancestor.offset);
2379 if (!ctx.useless_p ())
2380 {
2381 if (!dst_ctx)
2382 {
2383 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2384 count);
2385 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2386 }
2387
2388 dst_ctx->combine_with (ctx);
2389 }
2390 }
2391
2392 if (src->agg.items
2393 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2394 {
2395 struct ipa_agg_jf_item *item;
2396 int j;
2397
2398 /* Currently we do not produce clobber aggregate jump functions;
2399 replace this with merging when we do. */
2400 gcc_assert (!dst->agg.items);
2401
2402 dst->agg.items = vec_safe_copy (src->agg.items);
2403 dst->agg.by_ref = src->agg.by_ref;
2404 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2405 item->offset -= dst->value.ancestor.offset;
2406 }
2407
2408 if (src->type == IPA_JF_PASS_THROUGH
2409 && src->value.pass_through.operation == NOP_EXPR)
2410 {
2411 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2412 dst->value.ancestor.agg_preserved &=
2413 src->value.pass_through.agg_preserved;
2414 }
2415 else if (src->type == IPA_JF_ANCESTOR)
2416 {
2417 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2418 dst->value.ancestor.offset += src->value.ancestor.offset;
2419 dst->value.ancestor.agg_preserved &=
2420 src->value.ancestor.agg_preserved;
2421 }
2422 else
2423 ipa_set_jf_unknown (dst);
2424 }
2425 else if (dst->type == IPA_JF_PASS_THROUGH)
2426 {
2427 struct ipa_jump_func *src;
2428 /* We must check the range due to calls with a variable number of arguments,
2429 and we cannot combine jump functions with operations. */
2430 if (dst->value.pass_through.operation == NOP_EXPR
2431 && (dst->value.pass_through.formal_id
2432 < ipa_get_cs_argument_count (top)))
2433 {
2434 int dst_fid = dst->value.pass_through.formal_id;
2435 src = ipa_get_ith_jump_func (top, dst_fid);
2436 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2437 struct ipa_polymorphic_call_context *src_ctx
2438 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2439
2440 if (src_ctx && !src_ctx->useless_p ())
2441 {
2442 struct ipa_polymorphic_call_context ctx = *src_ctx;
2443
2444 /* TODO: Make type preserved safe WRT contexts. */
2445 if (!ipa_get_jf_pass_through_type_preserved (dst))
2446 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2447 if (!ctx.useless_p ())
2448 {
2449 if (!dst_ctx)
2450 {
2451 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2452 count);
2453 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2454 }
2455 dst_ctx->combine_with (ctx);
2456 }
2457 }
2458 switch (src->type)
2459 {
2460 case IPA_JF_UNKNOWN:
2461 ipa_set_jf_unknown (dst);
2462 break;
2463 case IPA_JF_CONST:
2464 ipa_set_jf_cst_copy (dst, src);
2465 break;
2466
2467 case IPA_JF_PASS_THROUGH:
2468 {
2469 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2470 enum tree_code operation;
2471 operation = ipa_get_jf_pass_through_operation (src);
2472
2473 if (operation == NOP_EXPR)
2474 {
2475 bool agg_p;
2476 agg_p = dst_agg_p
2477 && ipa_get_jf_pass_through_agg_preserved (src);
2478 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2479 }
2480 else
2481 {
2482 tree operand = ipa_get_jf_pass_through_operand (src);
2483 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2484 operation);
2485 }
2486 break;
2487 }
2488 case IPA_JF_ANCESTOR:
2489 {
2490 bool agg_p;
2491 agg_p = dst_agg_p
2492 && ipa_get_jf_ancestor_agg_preserved (src);
2493 ipa_set_ancestor_jf (dst,
2494 ipa_get_jf_ancestor_offset (src),
2495 ipa_get_jf_ancestor_formal_id (src),
2496 agg_p);
2497 break;
2498 }
2499 default:
2500 gcc_unreachable ();
2501 }
2502
2503 if (src->agg.items
2504 && (dst_agg_p || !src->agg.by_ref))
2505 {
2506 /* Currently we do not produce clobber aggregate jump
2507 functions; replace this with merging when we do. */
2508 gcc_assert (!dst->agg.items);
2509
2510 dst->agg.by_ref = src->agg.by_ref;
2511 dst->agg.items = vec_safe_copy (src->agg.items);
2512 }
2513 }
2514 else
2515 ipa_set_jf_unknown (dst);
2516 }
2517 }
2518 }
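
/* A hypothetical example of the combination performed above:

static int h (int x);
static int g (int y) { return h (y); }
static int f (int z) { return g (z); }

Before inlining, the g->h edge carries a simple pass-through of g's
parameter 0 and the f->g edge CS a simple pass-through of f's parameter 0.
When CS is inlined, the jump function of the argument of the g->h edge is
combined with the one on CS and becomes a simple pass-through of f's
parameter 0, so information about z keeps flowing to h.  */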
2519
2520 /* If TARGET is an addr_expr of a function declaration, make it the
2521 destination of an indirect edge IE (a speculative one if SPECULATIVE
2522 is true) and return the edge. Otherwise, return NULL. */
2523
2524 struct cgraph_edge *
2525 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2526 bool speculative)
2527 {
2528 struct cgraph_node *callee;
2529 struct inline_edge_summary *es = inline_edge_summary (ie);
2530 bool unreachable = false;
2531
2532 if (TREE_CODE (target) == ADDR_EXPR)
2533 target = TREE_OPERAND (target, 0);
2534 if (TREE_CODE (target) != FUNCTION_DECL)
2535 {
2536 target = canonicalize_constructor_val (target, NULL);
2537 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2538 {
2539 /* Member pointer call that goes through a VMT lookup. */
2540 if (ie->indirect_info->member_ptr
2541 /* Or if target is not an invariant expression and we do not
2542 know if it will evaluate to a function at runtime.
2543 This can happen when folding through &VAR, where &VAR
2544 is IP invariant, but VAR itself is not.
2545
2546 TODO: Revisit this when GCC 5 is branched. It seems that
2547 the member_ptr check is not needed and that we might try to
2548 fold the expression and see if VAR is readonly. */
2549 || !is_gimple_ip_invariant (target))
2550 {
2551 if (dump_enabled_p ())
2552 {
2553 location_t loc = gimple_location_safe (ie->call_stmt);
2554 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2555 "discovered direct call non-invariant "
2556 "%s/%i\n",
2557 ie->caller->name (), ie->caller->order);
2558 }
2559 return NULL;
2560 }
2561
2562
2563 if (dump_enabled_p ())
2564 {
2565 location_t loc = gimple_location_safe (ie->call_stmt);
2566 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2567 "discovered direct call to non-function in %s/%i, "
2568 "making it __builtin_unreachable\n",
2569 ie->caller->name (), ie->caller->order);
2570 }
2571
2572 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2573 callee = cgraph_node::get_create (target);
2574 unreachable = true;
2575 }
2576 else
2577 callee = cgraph_node::get (target);
2578 }
2579 else
2580 callee = cgraph_node::get (target);
2581
2582 /* Because may-edges are not explicitly represented and the vtable may be
2583 external, we may create the first reference to the object in the unit. */
2584 if (!callee || callee->global.inlined_to)
2585 {
2586
2587 /* We had better ensure we can refer to it.
2588 In the case of static functions we are out of luck, since we have
2589 already removed their bodies. In the case of public functions we may
2590 or may not introduce the reference. */
2591 if (!canonicalize_constructor_val (target, NULL)
2592 || !TREE_PUBLIC (target))
2593 {
2594 if (dump_file)
2595 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2596 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2597 xstrdup_for_dump (ie->caller->name ()),
2598 ie->caller->order,
2599 xstrdup_for_dump (ie->callee->name ()),
2600 ie->callee->order);
2601 return NULL;
2602 }
2603 callee = cgraph_node::get_create (target);
2604 }
2605
2606 /* If the edge is already speculative, check whether the speculation agrees. */
2607 if (speculative && ie->speculative)
2608 {
2609 struct cgraph_edge *e2;
2610 struct ipa_ref *ref;
2611 ie->speculative_call_info (e2, ie, ref);
2612 if (e2->callee->ultimate_alias_target ()
2613 != callee->ultimate_alias_target ())
2614 {
2615 if (dump_file)
2616 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2617 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2618 xstrdup_for_dump (ie->caller->name ()),
2619 ie->caller->order,
2620 xstrdup_for_dump (callee->name ()),
2621 callee->order,
2622 xstrdup_for_dump (e2->callee->name ()),
2623 e2->callee->order);
2624 }
2625 else
2626 {
2627 if (dump_file)
2628 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2629 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2630 xstrdup_for_dump (ie->caller->name ()),
2631 ie->caller->order,
2632 xstrdup_for_dump (callee->name ()),
2633 callee->order);
2634 }
2635 return NULL;
2636 }
2637
2638 if (!dbg_cnt (devirt))
2639 return NULL;
2640
2641 ipa_check_create_node_params ();
2642
2643 /* We cannot make edges to inline clones. It is a bug if someone removed
2644 the cgraph node too early. */
2645 gcc_assert (!callee->global.inlined_to);
2646
2647 if (dump_file && !unreachable)
2648 {
2649 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2650 "(%s/%i -> %s/%i), for stmt ",
2651 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2652 speculative ? "speculative" : "known",
2653 xstrdup_for_dump (ie->caller->name ()),
2654 ie->caller->order,
2655 xstrdup_for_dump (callee->name ()),
2656 callee->order);
2657 if (ie->call_stmt)
2658 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2659 else
2660 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2661 }
2662 if (dump_enabled_p ())
2663 {
2664 location_t loc = gimple_location_safe (ie->call_stmt);
2665
2666 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2667 "converting indirect call in %s to direct call to %s\n",
2668 ie->caller->name (), callee->name ());
2669 }
2670 if (!speculative)
2671 {
2672 struct cgraph_edge *orig = ie;
2673 ie = ie->make_direct (callee);
2674 /* If we resolved a speculative edge, the cost is already up to date
2675 for the direct call (adjusted by inline_edge_duplication_hook). */
2676 if (ie == orig)
2677 {
2678 es = inline_edge_summary (ie);
2679 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2680 - eni_size_weights.call_cost);
2681 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2682 - eni_time_weights.call_cost);
2683 }
2684 }
2685 else
2686 {
2687 if (!callee->can_be_discarded_p ())
2688 {
2689 cgraph_node *alias;
2690 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2691 if (alias)
2692 callee = alias;
2693 }
2694 /* make_speculative will update ie's cost to direct call cost. */
2695 ie = ie->make_speculative
2696 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2697 }
2698
2699 return ie;
2700 }
2701
2702 /* Retrieve a value from the aggregate jump function AGG at the given OFFSET,
2703 or return NULL if there is none. BY_REF specifies whether the value has to
2704 be passed by reference or by value. */
2705
2706 tree
2707 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2708 HOST_WIDE_INT offset, bool by_ref)
2709 {
2710 struct ipa_agg_jf_item *item;
2711 int i;
2712
2713 if (by_ref != agg->by_ref)
2714 return NULL;
2715
2716 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2717 if (item->offset == offset)
2718 {
2719 /* Currently we do not have clobber values; return NULL for them once
2720 we do. */
2721 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2722 return item->value;
2723 }
2724 return NULL;
2725 }
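
/* A hypothetical example of a lookup that succeeds here:

struct S { int a; void (*fn) (void); };
extern void work (void);
extern void use_s (struct S *);

void build (void)
{
struct S s;
s.a = 1;
s.fn = work;
use_s (&s);
}

The aggregate jump function of the argument records &work at the bit offset
of field fn, so a query with that OFFSET and BY_REF set to true (the
aggregate is passed by reference) returns the ADDR_EXPR of work.  */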
2726
2727 /* Remove a reference to SYMBOL from the list of references of a node given by
2728 reference description RDESC. Return true if the reference has been
2729 successfully found and removed. */
2730
2731 static bool
2732 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2733 {
2734 struct ipa_ref *to_del;
2735 struct cgraph_edge *origin;
2736
2737 origin = rdesc->cs;
2738 if (!origin)
2739 return false;
2740 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2741 origin->lto_stmt_uid);
2742 if (!to_del)
2743 return false;
2744
2745 to_del->remove_reference ();
2746 if (dump_file)
2747 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2748 xstrdup_for_dump (origin->caller->name ()),
2749 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2750 return true;
2751 }
2752
2753 /* If JFUNC has a reference description with refcount different from
2754 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2755 NULL. JFUNC must be a constant jump function. */
2756
2757 static struct ipa_cst_ref_desc *
2758 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2759 {
2760 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2761 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2762 return rdesc;
2763 else
2764 return NULL;
2765 }
2766
2767 /* If the value of constant jump function JFUNC is an address of a function
2768 declaration, return the associated call graph node. Otherwise return
2769 NULL. */
2770
2771 static cgraph_node *
2772 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2773 {
2774 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2775 tree cst = ipa_get_jf_constant (jfunc);
2776 if (TREE_CODE (cst) != ADDR_EXPR
2777 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2778 return NULL;
2779
2780 return cgraph_node::get (TREE_OPERAND (cst, 0));
2781 }
2782
2783
2784 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2785 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2786 the edge specified in the rdesc. Return false if either the symbol or the
2787 reference could not be found, otherwise return true. */
2788
2789 static bool
2790 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2791 {
2792 struct ipa_cst_ref_desc *rdesc;
2793 if (jfunc->type == IPA_JF_CONST
2794 && (rdesc = jfunc_rdesc_usable (jfunc))
2795 && --rdesc->refcount == 0)
2796 {
2797 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2798 if (!symbol)
2799 return false;
2800
2801 return remove_described_reference (symbol, rdesc);
2802 }
2803 return true;
2804 }
2805
2806 /* Try to find a destination for indirect edge IE that corresponds to a simple
2807 call or a call of a member function pointer and where the destination is a
2808 pointer formal parameter described by jump function JFUNC. If it can be
2809 determined, return the newly direct edge, otherwise return NULL.
2810 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2811
2812 static struct cgraph_edge *
2813 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2814 struct ipa_jump_func *jfunc,
2815 struct ipa_node_params *new_root_info)
2816 {
2817 struct cgraph_edge *cs;
2818 tree target;
2819 bool agg_contents = ie->indirect_info->agg_contents;
2820
2821 if (ie->indirect_info->agg_contents)
2822 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2823 ie->indirect_info->offset,
2824 ie->indirect_info->by_ref);
2825 else
2826 target = ipa_value_from_jfunc (new_root_info, jfunc);
2827 if (!target)
2828 return NULL;
2829 cs = ipa_make_edge_direct_to_target (ie, target);
2830
2831 if (cs && !agg_contents)
2832 {
2833 bool ok;
2834 gcc_checking_assert (cs->callee
2835 && (cs != ie
2836 || jfunc->type != IPA_JF_CONST
2837 || !cgraph_node_for_jfunc (jfunc)
2838 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2839 ok = try_decrement_rdesc_refcount (jfunc);
2840 gcc_checking_assert (ok);
2841 }
2842
2843 return cs;
2844 }
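
/* A hypothetical case resolved by the function above:

static void run (void (*cb) (void)) { cb (); }
extern void do_work (void);
void entry (void) { run (do_work); }

Analysis of run leaves an indirect edge for cb () with param_index 0; the
constant jump function of the argument of the entry->run edge then yields
do_work and the indirect edge is turned into a direct one.  */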
2845
2846 /* Return the target to be used in cases of impossible devirtualization. IE
2847 and target (the latter can be NULL) are dumped when dumping is enabled. */
2848
2849 tree
2850 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2851 {
2852 if (dump_file)
2853 {
2854 if (target)
2855 fprintf (dump_file,
2856 "Type inconsistent devirtualization: %s/%i->%s\n",
2857 ie->caller->name (), ie->caller->order,
2858 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2859 else
2860 fprintf (dump_file,
2861 "No devirtualization target in %s/%i\n",
2862 ie->caller->name (), ie->caller->order);
2863 }
2864 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2865 cgraph_node::get_create (new_target);
2866 return new_target;
2867 }
2868
2869 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2870 call based on a formal parameter which is described by jump function JFUNC
2871 and if it can be determined, make it direct and return the direct edge.
2872 Otherwise, return NULL. CTX describes the polymorphic context that the
2873 parameter the call is based on brings along with it. */
2874
2875 static struct cgraph_edge *
2876 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2877 struct ipa_jump_func *jfunc,
2878 struct ipa_polymorphic_call_context ctx)
2879 {
2880 tree target = NULL;
2881 bool speculative = false;
2882
2883 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2884 return NULL;
2885
2886 gcc_assert (!ie->indirect_info->by_ref);
2887
2888 /* Try to do lookup via known virtual table pointer value. */
2889 if (!ie->indirect_info->vptr_changed
2890 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2891 {
2892 tree vtable;
2893 unsigned HOST_WIDE_INT offset;
2894 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2895 ie->indirect_info->offset,
2896 true);
2897 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2898 {
2899 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2900 vtable, offset);
2901 if (t)
2902 {
2903 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2904 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2905 || !possible_polymorphic_call_target_p
2906 (ie, cgraph_node::get (t)))
2907 {
2908 /* Do not speculate builtin_unreachable, it is stupid! */
2909 if (!ie->indirect_info->vptr_changed)
2910 target = ipa_impossible_devirt_target (ie, target);
2911 }
2912 else
2913 {
2914 target = t;
2915 speculative = ie->indirect_info->vptr_changed;
2916 }
2917 }
2918 }
2919 }
2920
2921 ipa_polymorphic_call_context ie_context (ie);
2922 vec <cgraph_node *> targets;
2923 bool final;
2924
2925 ctx.offset_by (ie->indirect_info->offset);
2926 if (ie->indirect_info->vptr_changed)
2927 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2928 ie->indirect_info->otr_type);
2929 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2930 targets = possible_polymorphic_call_targets
2931 (ie->indirect_info->otr_type,
2932 ie->indirect_info->otr_token,
2933 ctx, &final);
2934 if (final && targets.length () <= 1)
2935 {
2936 speculative = false;
2937 if (targets.length () == 1)
2938 target = targets[0]->decl;
2939 else
2940 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2941 }
2942 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2943 && !ie->speculative && ie->maybe_hot_p ())
2944 {
2945 cgraph_node *n;
2946 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2947 ie->indirect_info->otr_token,
2948 ie->indirect_info->context);
2949 if (n)
2950 {
2951 target = n->decl;
2952 speculative = true;
2953 }
2954 }
2955
2956 if (target)
2957 {
2958 if (!possible_polymorphic_call_target_p
2959 (ie, cgraph_node::get_create (target)))
2960 {
2961 if (speculative)
2962 return NULL;
2963 target = ipa_impossible_devirt_target (ie, target);
2964 }
2965 return ipa_make_edge_direct_to_target (ie, target, speculative);
2966 }
2967 else
2968 return NULL;
2969 }
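
/* A hypothetical situation in which the vtable pointer lookup above can
succeed:

struct A { virtual int foo (); };
static int call (struct A *a) { return a->foo (); }
int outer (void) { struct A a; return call (&a); }

If the construction of a, and thus the store of the address of A's vtable
into its virtual table pointer field, is visible when outer is analyzed,
the aggregate jump function of the argument of the outer->call edge
contains that address, and vtable_pointer_value_to_vtable together with
gimple_get_virt_method_for_vtable can resolve the call in call () to
A::foo.  */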
2970
2971 /* Update the param called notes associated with NODE when CS is being inlined,
2972 assuming NODE is (potentially indirectly) inlined into CS->callee.
2973 Moreover, if the callee is discovered to be constant, create a new cgraph
2974 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2975 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2976
2977 static bool
2978 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2979 struct cgraph_node *node,
2980 vec<cgraph_edge *> *new_edges)
2981 {
2982 struct ipa_edge_args *top;
2983 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
2984 struct ipa_node_params *new_root_info;
2985 bool res = false;
2986
2987 ipa_check_create_edge_args ();
2988 top = IPA_EDGE_REF (cs);
2989 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2990 ? cs->caller->global.inlined_to
2991 : cs->caller);
2992
2993 for (ie = node->indirect_calls; ie; ie = next_ie)
2994 {
2995 struct cgraph_indirect_call_info *ici = ie->indirect_info;
2996 struct ipa_jump_func *jfunc;
2997 int param_index;
2998 cgraph_node *spec_target = NULL;
2999
3000 next_ie = ie->next_callee;
3001
3002 if (ici->param_index == -1)
3003 continue;
3004
3005 /* We must check the range due to calls with a variable number of arguments. */
3006 if (ici->param_index >= ipa_get_cs_argument_count (top))
3007 {
3008 ici->param_index = -1;
3009 continue;
3010 }
3011
3012 param_index = ici->param_index;
3013 jfunc = ipa_get_ith_jump_func (top, param_index);
3014
3015 if (ie->speculative)
3016 {
3017 struct cgraph_edge *de;
3018 struct ipa_ref *ref;
3019 ie->speculative_call_info (de, ie, ref);
3020 spec_target = de->callee;
3021 }
3022
3023 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3024 new_direct_edge = NULL;
3025 else if (ici->polymorphic)
3026 {
3027 ipa_polymorphic_call_context ctx;
3028 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3029 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3030 }
3031 else
3032 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3033 new_root_info);
3034 /* If speculation was removed, then we need to do nothing. */
3035 if (new_direct_edge && new_direct_edge != ie
3036 && new_direct_edge->callee == spec_target)
3037 {
3038 new_direct_edge->indirect_inlining_edge = 1;
3039 top = IPA_EDGE_REF (cs);
3040 res = true;
3041 if (!new_direct_edge->speculative)
3042 continue;
3043 }
3044 else if (new_direct_edge)
3045 {
3046 new_direct_edge->indirect_inlining_edge = 1;
3047 if (new_direct_edge->call_stmt)
3048 new_direct_edge->call_stmt_cannot_inline_p
3049 = !gimple_check_call_matching_types (
3050 new_direct_edge->call_stmt,
3051 new_direct_edge->callee->decl, false);
3052 if (new_edges)
3053 {
3054 new_edges->safe_push (new_direct_edge);
3055 res = true;
3056 }
3057 top = IPA_EDGE_REF (cs);
3058 /* If a speculative edge was introduced, we still need to update
3059 the call info of the indirect edge. */
3060 if (!new_direct_edge->speculative)
3061 continue;
3062 }
3063 if (jfunc->type == IPA_JF_PASS_THROUGH
3064 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3065 {
3066 if (ici->agg_contents
3067 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3068 && !ici->polymorphic)
3069 ici->param_index = -1;
3070 else
3071 {
3072 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3073 if (ici->polymorphic
3074 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3075 ici->vptr_changed = true;
3076 }
3077 }
3078 else if (jfunc->type == IPA_JF_ANCESTOR)
3079 {
3080 if (ici->agg_contents
3081 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3082 && !ici->polymorphic)
3083 ici->param_index = -1;
3084 else
3085 {
3086 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3087 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3088 if (ici->polymorphic
3089 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3090 ici->vptr_changed = true;
3091 }
3092 }
3093 else
3094 /* Either we can find a destination for this edge now or never. */
3095 ici->param_index = -1;
3096 }
3097
3098 return res;
3099 }
3100
3101 /* Recursively traverse subtree of NODE (including node) made of inlined
3102 cgraph_edges when CS has been inlined and invoke
3103 update_indirect_edges_after_inlining on all nodes and
3104 update_jump_functions_after_inlining on all non-inlined edges that lead out
3105 of this subtree. Newly discovered indirect edges will be added to
3106 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3107 created. */
3108
3109 static bool
3110 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3111 struct cgraph_node *node,
3112 vec<cgraph_edge *> *new_edges)
3113 {
3114 struct cgraph_edge *e;
3115 bool res;
3116
3117 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3118
3119 for (e = node->callees; e; e = e->next_callee)
3120 if (!e->inline_failed)
3121 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3122 else
3123 update_jump_functions_after_inlining (cs, e);
3124 for (e = node->indirect_calls; e; e = e->next_callee)
3125 update_jump_functions_after_inlining (cs, e);
3126
3127 return res;
3128 }
3129
3130 /* Combine two controlled uses counts as done during inlining. */
3131
3132 static int
3133 combine_controlled_uses_counters (int c, int d)
3134 {
3135 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3136 return IPA_UNDESCRIBED_USE;
3137 else
3138 return c + d - 1;
3139 }
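
/* For example, if the caller's parameter had three controlled uses (c == 3)
and the callee's corresponding parameter had two (d == 2), the combined
count is 3 + 2 - 1 == 4: the call that has just been inlined disappears as
a use, while the callee's two uses now count against the caller's parameter
alongside the caller's two remaining ones.  */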
3140
3141 /* Propagate the number of controlled users from CS->callee to the new root
3142 of the tree of inlined nodes. */
3143
3144 static void
3145 propagate_controlled_uses (struct cgraph_edge *cs)
3146 {
3147 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3148 struct cgraph_node *new_root = cs->caller->global.inlined_to
3149 ? cs->caller->global.inlined_to : cs->caller;
3150 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3151 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3152 int count, i;
3153
3154 count = MIN (ipa_get_cs_argument_count (args),
3155 ipa_get_param_count (old_root_info));
3156 for (i = 0; i < count; i++)
3157 {
3158 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3159 struct ipa_cst_ref_desc *rdesc;
3160
3161 if (jf->type == IPA_JF_PASS_THROUGH)
3162 {
3163 int src_idx, c, d;
3164 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3165 c = ipa_get_controlled_uses (new_root_info, src_idx);
3166 d = ipa_get_controlled_uses (old_root_info, i);
3167
3168 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3169 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3170 c = combine_controlled_uses_counters (c, d);
3171 ipa_set_controlled_uses (new_root_info, src_idx, c);
3172 if (c == 0 && new_root_info->ipcp_orig_node)
3173 {
3174 struct cgraph_node *n;
3175 struct ipa_ref *ref;
3176 tree t = new_root_info->known_csts[src_idx];
3177
3178 if (t && TREE_CODE (t) == ADDR_EXPR
3179 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3180 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3181 && (ref = new_root->find_reference (n, NULL, 0)))
3182 {
3183 if (dump_file)
3184 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3185 "reference from %s/%i to %s/%i.\n",
3186 xstrdup_for_dump (new_root->name ()),
3187 new_root->order,
3188 xstrdup_for_dump (n->name ()), n->order);
3189 ref->remove_reference ();
3190 }
3191 }
3192 }
3193 else if (jf->type == IPA_JF_CONST
3194 && (rdesc = jfunc_rdesc_usable (jf)))
3195 {
3196 int d = ipa_get_controlled_uses (old_root_info, i);
3197 int c = rdesc->refcount;
3198 rdesc->refcount = combine_controlled_uses_counters (c, d);
3199 if (rdesc->refcount == 0)
3200 {
3201 tree cst = ipa_get_jf_constant (jf);
3202 struct cgraph_node *n;
3203 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3204 && TREE_CODE (TREE_OPERAND (cst, 0))
3205 == FUNCTION_DECL);
3206 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3207 if (n)
3208 {
3209 struct cgraph_node *clone;
3210 bool ok;
3211 ok = remove_described_reference (n, rdesc);
3212 gcc_checking_assert (ok);
3213
3214 clone = cs->caller;
3215 while (clone->global.inlined_to
3216 && clone != rdesc->cs->caller
3217 && IPA_NODE_REF (clone)->ipcp_orig_node)
3218 {
3219 struct ipa_ref *ref;
3220 ref = clone->find_reference (n, NULL, 0);
3221 if (ref)
3222 {
3223 if (dump_file)
3224 fprintf (dump_file, "ipa-prop: Removing "
3225 "cloning-created reference "
3226 "from %s/%i to %s/%i.\n",
3227 xstrdup_for_dump (clone->name ()),
3228 clone->order,
3229 xstrdup_for_dump (n->name ()),
3230 n->order);
3231 ref->remove_reference ();
3232 }
3233 clone = clone->callers->caller;
3234 }
3235 }
3236 }
3237 }
3238 }
3239
3240 for (i = ipa_get_param_count (old_root_info);
3241 i < ipa_get_cs_argument_count (args);
3242 i++)
3243 {
3244 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3245
3246 if (jf->type == IPA_JF_CONST)
3247 {
3248 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3249 if (rdesc)
3250 rdesc->refcount = IPA_UNDESCRIBED_USE;
3251 }
3252 else if (jf->type == IPA_JF_PASS_THROUGH)
3253 ipa_set_controlled_uses (new_root_info,
3254 jf->value.pass_through.formal_id,
3255 IPA_UNDESCRIBED_USE);
3256 }
3257 }
3258
3259 /* Update jump functions and call note functions on inlining the call site CS.
3260 CS is expected to lead to a node already cloned by
3261 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3262 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3263 created. */
3264
3265 bool
3266 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3267 vec<cgraph_edge *> *new_edges)
3268 {
3269 bool changed;
3270 /* Do nothing if the preparation phase has not been carried out yet
3271 (i.e. during early inlining). */
3272 if (!ipa_node_params_sum)
3273 return false;
3274 gcc_assert (ipa_edge_args_vector);
3275
3276 propagate_controlled_uses (cs);
3277 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3278
3279 return changed;
3280 }
3281
3282 /* Frees all dynamically allocated structures that the argument info points
3283 to. */
3284
3285 void
3286 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3287 {
3288 vec_free (args->jump_functions);
3289 memset (args, 0, sizeof (*args));
3290 }
3291
3292 /* Free all ipa_edge structures. */
3293
3294 void
3295 ipa_free_all_edge_args (void)
3296 {
3297 int i;
3298 struct ipa_edge_args *args;
3299
3300 if (!ipa_edge_args_vector)
3301 return;
3302
3303 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3304 ipa_free_edge_args_substructures (args);
3305
3306 vec_free (ipa_edge_args_vector);
3307 }
3308
3309 /* Frees all dynamically allocated structures that the param info points
3310 to. */
3311
3312 ipa_node_params::~ipa_node_params ()
3313 {
3314 descriptors.release ();
3315 free (lattices);
3316 /* Lattice values and their sources are deallocated with their allocation
3317 pool. */
3318 known_csts.release ();
3319 known_contexts.release ();
3320
3321 lattices = NULL;
3322 ipcp_orig_node = NULL;
3323 analysis_done = 0;
3324 node_enqueued = 0;
3325 do_clone_for_all_contexts = 0;
3326 is_all_contexts_clone = 0;
3327 node_dead = 0;
3328 }
3329
3330 /* Free all ipa_node_params structures. */
3331
3332 void
3333 ipa_free_all_node_params (void)
3334 {
3335 delete ipa_node_params_sum;
3336 ipa_node_params_sum = NULL;
3337 }
3338
3339 /* Grow ipcp_transformations if necessary. */
3340
3341 void
3342 ipcp_grow_transformations_if_necessary (void)
3343 {
3344 if (vec_safe_length (ipcp_transformations)
3345 <= (unsigned) symtab->cgraph_max_uid)
3346 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3347 }
3348
3349 /* Set the aggregate replacements of NODE to be AGGVALS. */
3350
3351 void
3352 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3353 struct ipa_agg_replacement_value *aggvals)
3354 {
3355 ipcp_grow_transformations_if_necessary ();
3356 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3357 }
3358
3359 /* Hook that is called by cgraph.c when an edge is removed. */
3360
3361 static void
3362 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3363 {
3364 struct ipa_edge_args *args;
3365
3366 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3367 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3368 return;
3369
3370 args = IPA_EDGE_REF (cs);
3371 if (args->jump_functions)
3372 {
3373 struct ipa_jump_func *jf;
3374 int i;
3375 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3376 {
3377 struct ipa_cst_ref_desc *rdesc;
3378 try_decrement_rdesc_refcount (jf);
3379 if (jf->type == IPA_JF_CONST
3380 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3381 && rdesc->cs == cs)
3382 rdesc->cs = NULL;
3383 }
3384 }
3385
3386 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3387 }
3388
3389 /* Hook that is called by cgraph.c when an edge is duplicated. */
3390
3391 static void
3392 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3393 void *)
3394 {
3395 struct ipa_edge_args *old_args, *new_args;
3396 unsigned int i;
3397
3398 ipa_check_create_edge_args ();
3399
3400 old_args = IPA_EDGE_REF (src);
3401 new_args = IPA_EDGE_REF (dst);
3402
3403 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3404 if (old_args->polymorphic_call_contexts)
3405 new_args->polymorphic_call_contexts
3406 = vec_safe_copy (old_args->polymorphic_call_contexts);
3407
3408 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3409 {
3410 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3411 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3412
3413 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3414
3415 if (src_jf->type == IPA_JF_CONST)
3416 {
3417 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3418
3419 if (!src_rdesc)
3420 dst_jf->value.constant.rdesc = NULL;
3421 else if (src->caller == dst->caller)
3422 {
3423 struct ipa_ref *ref;
3424 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3425 gcc_checking_assert (n);
3426 ref = src->caller->find_reference (n, src->call_stmt,
3427 src->lto_stmt_uid);
3428 gcc_checking_assert (ref);
3429 dst->caller->clone_reference (ref, ref->stmt);
3430
3431 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3432 dst_rdesc->cs = dst;
3433 dst_rdesc->refcount = src_rdesc->refcount;
3434 dst_rdesc->next_duplicate = NULL;
3435 dst_jf->value.constant.rdesc = dst_rdesc;
3436 }
3437 else if (src_rdesc->cs == src)
3438 {
3439 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3440 dst_rdesc->cs = dst;
3441 dst_rdesc->refcount = src_rdesc->refcount;
3442 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3443 src_rdesc->next_duplicate = dst_rdesc;
3444 dst_jf->value.constant.rdesc = dst_rdesc;
3445 }
3446 else
3447 {
3448 struct ipa_cst_ref_desc *dst_rdesc;
3449 /* This can happen during inlining, when a JFUNC can refer to a
3450 reference taken in a function up in the tree of inline clones.
3451 We need to find the duplicate that refers to our tree of
3452 inline clones. */
3453
3454 gcc_assert (dst->caller->global.inlined_to);
3455 for (dst_rdesc = src_rdesc->next_duplicate;
3456 dst_rdesc;
3457 dst_rdesc = dst_rdesc->next_duplicate)
3458 {
3459 struct cgraph_node *top;
3460 top = dst_rdesc->cs->caller->global.inlined_to
3461 ? dst_rdesc->cs->caller->global.inlined_to
3462 : dst_rdesc->cs->caller;
3463 if (dst->caller->global.inlined_to == top)
3464 break;
3465 }
3466 gcc_assert (dst_rdesc);
3467 dst_jf->value.constant.rdesc = dst_rdesc;
3468 }
3469 }
3470 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3471 && src->caller == dst->caller)
3472 {
3473 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3474 ? dst->caller->global.inlined_to : dst->caller;
3475 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3476 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3477
3478 int c = ipa_get_controlled_uses (root_info, idx);
3479 if (c != IPA_UNDESCRIBED_USE)
3480 {
3481 c++;
3482 ipa_set_controlled_uses (root_info, idx, c);
3483 }
3484 }
3485 }
3486 }
3487
3488 /* Analyze a function newly added to the callgraph. */
3489
3490 static void
3491 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3492 {
3493 if (node->has_gimple_body_p ())
3494 ipa_analyze_node (node);
3495 }
3496
3497 /* Hook that is called by summary when a node is duplicated. */
3498
3499 void
3500 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3501 ipa_node_params *old_info,
3502 ipa_node_params *new_info)
3503 {
3504 ipa_agg_replacement_value *old_av, *new_av;
3505
3506 new_info->descriptors = old_info->descriptors.copy ();
3507 new_info->lattices = NULL;
3508 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3509
3510 new_info->analysis_done = old_info->analysis_done;
3511 new_info->node_enqueued = old_info->node_enqueued;
3512 new_info->versionable = old_info->versionable;
3513
3514 old_av = ipa_get_agg_replacements_for_node (src);
3515 if (old_av)
3516 {
3517 new_av = NULL;
3518 while (old_av)
3519 {
3520 struct ipa_agg_replacement_value *v;
3521
3522 v = ggc_alloc<ipa_agg_replacement_value> ();
3523 memcpy (v, old_av, sizeof (*v));
3524 v->next = new_av;
3525 new_av = v;
3526 old_av = old_av->next;
3527 }
3528 ipa_set_node_agg_value_chain (dst, new_av);
3529 }
3530
3531 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3532
3533 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3534 {
3535 ipcp_grow_transformations_if_necessary ();
3536 src_trans = ipcp_get_transformation_summary (src);
3537 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3538 vec<ipa_alignment, va_gc> *&dst_alignments
3539 = ipcp_get_transformation_summary (dst)->alignments;
3540 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3541 for (unsigned i = 0; i < src_alignments->length (); ++i)
3542 dst_alignments->quick_push ((*src_alignments)[i]);
3543 }
3544 }
3545
3546 /* Register our cgraph hooks if they are not already there. */
3547
3548 void
3549 ipa_register_cgraph_hooks (void)
3550 {
3551 ipa_check_create_node_params ();
3552
3553 if (!edge_removal_hook_holder)
3554 edge_removal_hook_holder =
3555 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3556 if (!edge_duplication_hook_holder)
3557 edge_duplication_hook_holder =
3558 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3559 function_insertion_hook_holder =
3560 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3561 }
3562
3563 /* Unregister our cgraph hooks. */
3564
3565 static void
3566 ipa_unregister_cgraph_hooks (void)
3567 {
3568 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3569 edge_removal_hook_holder = NULL;
3570 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3571 edge_duplication_hook_holder = NULL;
3572 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3573 function_insertion_hook_holder = NULL;
3574 }
3575
3576 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3577 longer needed after ipa-cp. */
3578
3579 void
3580 ipa_free_all_structures_after_ipa_cp (void)
3581 {
3582 if (!optimize && !in_lto_p)
3583 {
3584 ipa_free_all_edge_args ();
3585 ipa_free_all_node_params ();
3586 ipcp_sources_pool.release ();
3587 ipcp_cst_values_pool.release ();
3588 ipcp_poly_ctx_values_pool.release ();
3589 ipcp_agg_lattice_pool.release ();
3590 ipa_unregister_cgraph_hooks ();
3591 ipa_refdesc_pool.release ();
3592 }
3593 }
3594
3595 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3596 longer needed after indirect inlining. */
3597
3598 void
3599 ipa_free_all_structures_after_iinln (void)
3600 {
3601 ipa_free_all_edge_args ();
3602 ipa_free_all_node_params ();
3603 ipa_unregister_cgraph_hooks ();
3604 ipcp_sources_pool.release ();
3605 ipcp_cst_values_pool.release ();
3606 ipcp_poly_ctx_values_pool.release ();
3607 ipcp_agg_lattice_pool.release ();
3608 ipa_refdesc_pool.release ();
3609 }
3610
3611 /* Print the ipa_tree_map data structures of function NODE
3612 to F. */
3613
3614 void
3615 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3616 {
3617 int i, count;
3618 struct ipa_node_params *info;
3619
3620 if (!node->definition)
3621 return;
3622 info = IPA_NODE_REF (node);
3623 fprintf (f, " function %s/%i parameter descriptors:\n",
3624 node->name (), node->order);
3625 count = ipa_get_param_count (info);
3626 for (i = 0; i < count; i++)
3627 {
3628 int c;
3629
3630 fprintf (f, " ");
3631 ipa_dump_param (f, info, i);
3632 if (ipa_is_param_used (info, i))
3633 fprintf (f, " used");
3634 c = ipa_get_controlled_uses (info, i);
3635 if (c == IPA_UNDESCRIBED_USE)
3636 fprintf (f, " undescribed_use");
3637 else
3638 fprintf (f, " controlled_uses=%i", c);
3639 fprintf (f, "\n");
3640 }
3641 }
3642
3643 /* Print ipa_tree_map data structures of all functions in the
3644 callgraph to F. */
3645
3646 void
3647 ipa_print_all_params (FILE * f)
3648 {
3649 struct cgraph_node *node;
3650
3651 fprintf (f, "\nFunction parameters:\n");
3652 FOR_EACH_FUNCTION (node)
3653 ipa_print_node_params (f, node);
3654 }
3655
3656 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3657
3658 vec<tree>
3659 ipa_get_vector_of_formal_parms (tree fndecl)
3660 {
3661 vec<tree> args;
3662 int count;
3663 tree parm;
3664
3665 gcc_assert (!flag_wpa);
3666 count = count_formal_params (fndecl);
3667 args.create (count);
3668 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3669 args.quick_push (parm);
3670
3671 return args;
3672 }
3673
3674 /* Return a heap allocated vector containing types of formal parameters of
3675 function type FNTYPE. */
3676
3677 vec<tree>
3678 ipa_get_vector_of_formal_parm_types (tree fntype)
3679 {
3680 vec<tree> types;
3681 int count = 0;
3682 tree t;
3683
3684 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3685 count++;
3686
3687 types.create (count);
3688 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3689 types.quick_push (TREE_VALUE (t));
3690
3691 return types;
3692 }
3693
3694 /* Modify the function declaration FNDECL and its type according to the plan
3695 in ADJUSTMENTS. It also sets the base fields of individual adjustment
3696 structures to reflect the actual parameters being modified, which are
3697 determined by the base_index field. */
3698
3699 void
3700 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3701 {
3702 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3703 tree orig_type = TREE_TYPE (fndecl);
3704 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3705
3706 /* The following test is an ugly hack; some functions simply don't have any
3707 arguments in their type. This is probably a bug, but well... */
3708 bool care_for_types = (old_arg_types != NULL_TREE);
3709 bool last_parm_void;
3710 vec<tree> otypes;
3711 if (care_for_types)
3712 {
3713 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3714 == void_type_node);
3715 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3716 if (last_parm_void)
3717 gcc_assert (oparms.length () + 1 == otypes.length ());
3718 else
3719 gcc_assert (oparms.length () == otypes.length ());
3720 }
3721 else
3722 {
3723 last_parm_void = false;
3724 otypes.create (0);
3725 }
3726
3727 int len = adjustments.length ();
3728 tree *link = &DECL_ARGUMENTS (fndecl);
3729 tree new_arg_types = NULL;
3730 for (int i = 0; i < len; i++)
3731 {
3732 struct ipa_parm_adjustment *adj;
3733 gcc_assert (link);
3734
3735 adj = &adjustments[i];
3736 tree parm;
3737 if (adj->op == IPA_PARM_OP_NEW)
3738 parm = NULL;
3739 else
3740 parm = oparms[adj->base_index];
3741 adj->base = parm;
3742
3743 if (adj->op == IPA_PARM_OP_COPY)
3744 {
3745 if (care_for_types)
3746 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3747 new_arg_types);
3748 *link = parm;
3749 link = &DECL_CHAIN (parm);
3750 }
3751 else if (adj->op != IPA_PARM_OP_REMOVE)
3752 {
3753 tree new_parm;
3754 tree ptype;
3755
3756 if (adj->by_ref)
3757 ptype = build_pointer_type (adj->type);
3758 else
3759 {
3760 ptype = adj->type;
3761 if (is_gimple_reg_type (ptype))
3762 {
3763 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3764 if (TYPE_ALIGN (ptype) < malign)
3765 ptype = build_aligned_type (ptype, malign);
3766 }
3767 }
3768
3769 if (care_for_types)
3770 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3771
3772 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3773 ptype);
3774 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3775 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3776 DECL_ARTIFICIAL (new_parm) = 1;
3777 DECL_ARG_TYPE (new_parm) = ptype;
3778 DECL_CONTEXT (new_parm) = fndecl;
3779 TREE_USED (new_parm) = 1;
3780 DECL_IGNORED_P (new_parm) = 1;
3781 layout_decl (new_parm, 0);
3782
3783 if (adj->op == IPA_PARM_OP_NEW)
3784 adj->base = NULL;
3785 else
3786 adj->base = parm;
3787 adj->new_decl = new_parm;
3788
3789 *link = new_parm;
3790 link = &DECL_CHAIN (new_parm);
3791 }
3792 }
3793
3794 *link = NULL_TREE;
3795
3796 tree new_reversed = NULL;
3797 if (care_for_types)
3798 {
3799 new_reversed = nreverse (new_arg_types);
3800 if (last_parm_void)
3801 {
3802 if (new_reversed)
3803 TREE_CHAIN (new_arg_types) = void_list_node;
3804 else
3805 new_reversed = void_list_node;
3806 }
3807 }
3808
3809 /* Use copy_node to preserve as much as possible from the original type
3810 (debug info, attribute lists etc.).
3811 The exception is METHOD_TYPEs, which must have a THIS argument; when
3812 we are asked to remove it, we need to build a new FUNCTION_TYPE
3813 instead. */
3814 tree new_type = NULL;
3815 if (TREE_CODE (orig_type) != METHOD_TYPE
3816 || (adjustments[0].op == IPA_PARM_OP_COPY
3817 && adjustments[0].base_index == 0))
3818 {
3819 new_type = build_distinct_type_copy (orig_type);
3820 TYPE_ARG_TYPES (new_type) = new_reversed;
3821 }
3822 else
3823 {
3824 new_type
3825 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3826 new_reversed));
3827 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3828 DECL_VINDEX (fndecl) = NULL_TREE;
3829 }
3830
3831 /* When the signature changes, we need to clear builtin info. */
3832 if (DECL_BUILT_IN (fndecl))
3833 {
3834 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3835 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3836 }
3837
3838 TREE_TYPE (fndecl) = new_type;
3839 DECL_VIRTUAL_P (fndecl) = 0;
3840 DECL_LANG_SPECIFIC (fndecl) = NULL;
3841 otypes.release ();
3842 oparms.release ();
3843 }
3844
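/* An illustrative sketch (hypothetical, not from any testcase): given

     int f (int a, struct S *s);

   an adjustment vector whose first entry copies parameter 0 and whose
   second entry is neither a copy nor a removal -- a reduction of
   parameter 1 to a scalar of type int -- rewrites the declaration to
   the equivalent of

     int f (int a, int ISRA.0);

   The name ISRA.0 is made up here; real names are produced by
   create_tmp_var_name from the adjustment's arg_prefix (or "SYNTH").  */
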
3845 /* Modify the actual arguments of the call statement STMT as indicated in
3846 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
3847 otherwise it must contain the corresponding call graph edge. */
3848
3849 void
3850 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3851 ipa_parm_adjustment_vec adjustments)
3852 {
3853 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3854 vec<tree> vargs;
3855 vec<tree, va_gc> **debug_args = NULL;
3856 gcall *new_stmt;
3857 gimple_stmt_iterator gsi, prev_gsi;
3858 tree callee_decl;
3859 int i, len;
3860
3861 len = adjustments.length ();
3862 vargs.create (len);
3863 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3864 current_node->remove_stmt_references (stmt);
3865
3866 gsi = gsi_for_stmt (stmt);
3867 prev_gsi = gsi;
3868 gsi_prev (&prev_gsi);
3869 for (i = 0; i < len; i++)
3870 {
3871 struct ipa_parm_adjustment *adj;
3872
3873 adj = &adjustments[i];
3874
3875 if (adj->op == IPA_PARM_OP_COPY)
3876 {
3877 tree arg = gimple_call_arg (stmt, adj->base_index);
3878
3879 vargs.quick_push (arg);
3880 }
3881 else if (adj->op != IPA_PARM_OP_REMOVE)
3882 {
3883 tree expr, base, off;
3884 location_t loc;
3885 unsigned int deref_align = 0;
3886 bool deref_base = false;
3887
3888 /* We create a new parameter out of the value of the old one; we can
3889 do the following kinds of transformations:
3890
3891 - A scalar passed by reference is converted to a scalar passed by
3892 value. (adj->by_ref is false and the type of the original
3893 actual argument is a pointer to a scalar).
3894
3895 - A part of an aggregate is passed instead of the whole aggregate.
3896 The part can be passed either by value or by reference, this is
3897 determined by value of adj->by_ref. Moreover, the code below
3898 handles both situations when the original aggregate is passed by
3899 value (its type is not a pointer) and when it is passed by
3900 reference (it is a pointer to an aggregate).
3901
3902 When the new argument is passed by reference (adj->by_ref is true)
3903 it must be a part of an aggregate and therefore we form it by
3904 simply taking the address of a reference inside the original
3905 aggregate. */
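
	     /* A concrete sketch (hypothetical, for illustration only): if
		the old call was foo (&s) and the new argument is the scalar
		field of s at byte offset 4, the code below builds the
		equivalent of

		  tmp_1 = MEM[(int *) &s + 4];
		  foo.part (tmp_1);

		whereas with adj->by_ref set it would instead pass the
		address &MEM[(int *) &s + 4] without loading from it.  */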
3906
3907 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3908 base = gimple_call_arg (stmt, adj->base_index);
3909 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3910 : EXPR_LOCATION (base);
3911
3912 if (TREE_CODE (base) != ADDR_EXPR
3913 && POINTER_TYPE_P (TREE_TYPE (base)))
3914 off = build_int_cst (adj->alias_ptr_type,
3915 adj->offset / BITS_PER_UNIT);
3916 else
3917 {
3918 HOST_WIDE_INT base_offset;
3919 tree prev_base;
3920 bool addrof;
3921
3922 if (TREE_CODE (base) == ADDR_EXPR)
3923 {
3924 base = TREE_OPERAND (base, 0);
3925 addrof = true;
3926 }
3927 else
3928 addrof = false;
3929 prev_base = base;
3930 base = get_addr_base_and_unit_offset (base, &base_offset);
3931 /* Aggregate arguments can have non-invariant addresses. */
3932 if (!base)
3933 {
3934 base = build_fold_addr_expr (prev_base);
3935 off = build_int_cst (adj->alias_ptr_type,
3936 adj->offset / BITS_PER_UNIT);
3937 }
3938 else if (TREE_CODE (base) == MEM_REF)
3939 {
3940 if (!addrof)
3941 {
3942 deref_base = true;
3943 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3944 }
3945 off = build_int_cst (adj->alias_ptr_type,
3946 base_offset
3947 + adj->offset / BITS_PER_UNIT);
3948 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3949 off);
3950 base = TREE_OPERAND (base, 0);
3951 }
3952 else
3953 {
3954 off = build_int_cst (adj->alias_ptr_type,
3955 base_offset
3956 + adj->offset / BITS_PER_UNIT);
3957 base = build_fold_addr_expr (base);
3958 }
3959 }
3960
3961 if (!adj->by_ref)
3962 {
3963 tree type = adj->type;
3964 unsigned int align;
3965 unsigned HOST_WIDE_INT misalign;
3966
3967 if (deref_base)
3968 {
3969 align = deref_align;
3970 misalign = 0;
3971 }
3972 else
3973 {
3974 get_pointer_alignment_1 (base, &align, &misalign);
3975 if (TYPE_ALIGN (type) > align)
3976 align = TYPE_ALIGN (type);
3977 }
3978 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3979 * BITS_PER_UNIT);
3980 misalign = misalign & (align - 1);
3981 if (misalign != 0)
3982 align = (misalign & -misalign);
3983 if (align < TYPE_ALIGN (type))
3984 type = build_aligned_type (type, align);
3985 base = force_gimple_operand_gsi (&gsi, base,
3986 true, NULL, true, GSI_SAME_STMT);
3987 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3988 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
3989 /* If expr is not a valid gimple call argument, emit
3990 a load into a temporary. */
3991 if (is_gimple_reg_type (TREE_TYPE (expr)))
3992 {
3993 gimple *tem = gimple_build_assign (NULL_TREE, expr);
3994 if (gimple_in_ssa_p (cfun))
3995 {
3996 gimple_set_vuse (tem, gimple_vuse (stmt));
3997 expr = make_ssa_name (TREE_TYPE (expr), tem);
3998 }
3999 else
4000 expr = create_tmp_reg (TREE_TYPE (expr));
4001 gimple_assign_set_lhs (tem, expr);
4002 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4003 }
4004 }
4005 else
4006 {
4007 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4008 REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
4009 expr = build_fold_addr_expr (expr);
4010 expr = force_gimple_operand_gsi (&gsi, expr,
4011 true, NULL, true, GSI_SAME_STMT);
4012 }
4013 vargs.quick_push (expr);
4014 }
4015 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4016 {
4017 unsigned int ix;
4018 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4019 gimple *def_temp;
4020
4021 arg = gimple_call_arg (stmt, adj->base_index);
4022 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4023 {
4024 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4025 continue;
4026 arg = fold_convert_loc (gimple_location (stmt),
4027 TREE_TYPE (origin), arg);
4028 }
4029 if (debug_args == NULL)
4030 debug_args = decl_debug_args_insert (callee_decl);
4031 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4032 if (ddecl == origin)
4033 {
4034 ddecl = (**debug_args)[ix + 1];
4035 break;
4036 }
4037 if (ddecl == NULL)
4038 {
4039 ddecl = make_node (DEBUG_EXPR_DECL);
4040 DECL_ARTIFICIAL (ddecl) = 1;
4041 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4042 DECL_MODE (ddecl) = DECL_MODE (origin);
4043
4044 vec_safe_push (*debug_args, origin);
4045 vec_safe_push (*debug_args, ddecl);
4046 }
4047 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4048 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4049 }
4050 }
4051
4052 if (dump_file && (dump_flags & TDF_DETAILS))
4053 {
4054 fprintf (dump_file, "replacing stmt:");
4055 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4056 }
4057
4058 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4059 vargs.release ();
4060 if (gimple_call_lhs (stmt))
4061 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4062
4063 gimple_set_block (new_stmt, gimple_block (stmt));
4064 if (gimple_has_location (stmt))
4065 gimple_set_location (new_stmt, gimple_location (stmt));
4066 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4067 gimple_call_copy_flags (new_stmt, stmt);
4068 if (gimple_in_ssa_p (cfun))
4069 {
4070 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4071 if (gimple_vdef (stmt))
4072 {
4073 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4074 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4075 }
4076 }
4077
4078 if (dump_file && (dump_flags & TDF_DETAILS))
4079 {
4080 fprintf (dump_file, "with stmt:");
4081 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4082 fprintf (dump_file, "\n");
4083 }
4084 gsi_replace (&gsi, new_stmt, true);
4085 if (cs)
4086 cs->set_call_stmt (new_stmt);
4087 do
4088 {
4089 current_node->record_stmt_references (gsi_stmt (gsi));
4090 gsi_prev (&gsi);
4091 }
4092 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4093 }
4094
4095 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4096 so. ADJUSTMENTS is a vector of adjustments. CONVERT
4097 specifies whether the function should care about type incompatibility
4098 between the current and new expressions. If it is false, it will leave
4099 incompatibility issues to the caller. Return true iff the expression
4100 was modified. */
4101
4102 bool
4103 ipa_modify_expr (tree *expr, bool convert,
4104 ipa_parm_adjustment_vec adjustments)
4105 {
4106 struct ipa_parm_adjustment *cand
4107 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4108 if (!cand)
4109 return false;
4110
4111 tree src;
4112 if (cand->by_ref)
4113 {
4114 src = build_simple_mem_ref (cand->new_decl);
4115 REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
4116 }
4117 else
4118 src = cand->new_decl;
4119
4120 if (dump_file && (dump_flags & TDF_DETAILS))
4121 {
4122 fprintf (dump_file, "About to replace expr ");
4123 print_generic_expr (dump_file, *expr, 0);
4124 fprintf (dump_file, " with ");
4125 print_generic_expr (dump_file, src, 0);
4126 fprintf (dump_file, "\n");
4127 }
4128
4129 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4130 {
4131 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4132 *expr = vce;
4133 }
4134 else
4135 *expr = src;
4136 return true;
4137 }
4138
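/* For instance (hypothetical): when parameter p of type struct S * has
   been reduced to a new scalar parameter holding p->f, a use of p->f in
   the body whose base and offset match that adjustment is rewritten by
   the function above to the new parameter directly, to *new_decl when
   cand->by_ref is set, and is wrapped in a VIEW_CONVERT_EXPR when CONVERT
   is set and the types are not compatible enough.  */
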
4139 /* If T is an SSA_NAME, return its base variable if it is a default
4140 def, or NULL if it is not. If IGNORE_DEFAULT_DEF is true, the base
4141 variable is always returned, regardless of whether it is a default
4142 def. Return T if it is not an SSA_NAME. */
4143
4144 static tree
4145 get_ssa_base_param (tree t, bool ignore_default_def)
4146 {
4147 if (TREE_CODE (t) == SSA_NAME)
4148 {
4149 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4150 return SSA_NAME_VAR (t);
4151 else
4152 return NULL_TREE;
4153 }
4154 return t;
4155 }
4156
4157 /* Given an expression, return an adjustment entry specifying the
4158 transformation to be done on EXPR. If no suitable adjustment entry
4159 was found, returns NULL.
4160
4161 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4162 default def; otherwise bail on them.
4163
4164 If CONVERT is non-NULL, this function will set *CONVERT if the
4165 expression provided is a component reference. ADJUSTMENTS is the
4166 adjustments vector. */
4167
4168 ipa_parm_adjustment *
4169 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4170 ipa_parm_adjustment_vec adjustments,
4171 bool ignore_default_def)
4172 {
4173 if (TREE_CODE (**expr) == BIT_FIELD_REF
4174 || TREE_CODE (**expr) == IMAGPART_EXPR
4175 || TREE_CODE (**expr) == REALPART_EXPR)
4176 {
4177 *expr = &TREE_OPERAND (**expr, 0);
4178 if (convert)
4179 *convert = true;
4180 }
4181
4182 HOST_WIDE_INT offset, size, max_size;
4183 bool reverse;
4184 tree base
4185 = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
4186 if (!base || size == -1 || max_size == -1)
4187 return NULL;
4188
4189 if (TREE_CODE (base) == MEM_REF)
4190 {
4191 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4192 base = TREE_OPERAND (base, 0);
4193 }
4194
4195 base = get_ssa_base_param (base, ignore_default_def);
4196 if (!base || TREE_CODE (base) != PARM_DECL)
4197 return NULL;
4198
4199 struct ipa_parm_adjustment *cand = NULL;
4200 unsigned int len = adjustments.length ();
4201 for (unsigned i = 0; i < len; i++)
4202 {
4203 struct ipa_parm_adjustment *adj = &adjustments[i];
4204
4205 if (adj->base == base
4206 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4207 {
4208 cand = adj;
4209 break;
4210 }
4211 }
4212
4213 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4214 return NULL;
4215 return cand;
4216 }
4217
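/* A sketch of the matching done above (assumed example): for an
   expression p->f where p is a PARM_DECL, get_ref_base_and_extent yields
   base p and the bit offset of field f; the returned candidate is the
   adjustment entry with adj->base == p and adj->offset equal to that
   offset.  Plain copies and removals are rejected at the end because no
   expression replacement is needed for them.  */
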
4218 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4219
4220 static bool
4221 index_in_adjustments_multiple_times_p (int base_index,
4222 ipa_parm_adjustment_vec adjustments)
4223 {
4224 int i, len = adjustments.length ();
4225 bool one = false;
4226
4227 for (i = 0; i < len; i++)
4228 {
4229 struct ipa_parm_adjustment *adj;
4230 adj = &adjustments[i];
4231
4232 if (adj->base_index == base_index)
4233 {
4234 if (one)
4235 return true;
4236 else
4237 one = true;
4238 }
4239 }
4240 return false;
4241 }
4242
4243
4244 /* Return adjustments that should have the same effect on function parameters
4245 and call arguments as if they were first changed according to adjustments in
4246 INNER and then by adjustments in OUTER. */
4247
4248 ipa_parm_adjustment_vec
4249 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4250 ipa_parm_adjustment_vec outer)
4251 {
4252 int i, outlen = outer.length ();
4253 int inlen = inner.length ();
4254 int removals = 0;
4255 ipa_parm_adjustment_vec adjustments, tmp;
4256
4257 tmp.create (inlen);
4258 for (i = 0; i < inlen; i++)
4259 {
4260 struct ipa_parm_adjustment *n;
4261 n = &inner[i];
4262
4263 if (n->op == IPA_PARM_OP_REMOVE)
4264 removals++;
4265 else
4266 {
4267 /* FIXME: Handling of new arguments is not implemented yet. */
4268 gcc_assert (n->op != IPA_PARM_OP_NEW);
4269 tmp.quick_push (*n);
4270 }
4271 }
4272
4273 adjustments.create (outlen + removals);
4274 for (i = 0; i < outlen; i++)
4275 {
4276 struct ipa_parm_adjustment r;
4277 struct ipa_parm_adjustment *out = &outer[i];
4278 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4279
4280 memset (&r, 0, sizeof (r));
4281 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4282 if (out->op == IPA_PARM_OP_REMOVE)
4283 {
4284 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4285 {
4286 r.op = IPA_PARM_OP_REMOVE;
4287 adjustments.quick_push (r);
4288 }
4289 continue;
4290 }
4291 else
4292 {
4293 /* FIXME: Handling of new arguments is not implemented yet. */
4294 gcc_assert (out->op != IPA_PARM_OP_NEW);
4295 }
4296
4297 r.base_index = in->base_index;
4298 r.type = out->type;
4299
4300 /* FIXME: Create nonlocal value too. */
4301
4302 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4303 r.op = IPA_PARM_OP_COPY;
4304 else if (in->op == IPA_PARM_OP_COPY)
4305 r.offset = out->offset;
4306 else if (out->op == IPA_PARM_OP_COPY)
4307 r.offset = in->offset;
4308 else
4309 r.offset = in->offset + out->offset;
4310 adjustments.quick_push (r);
4311 }
4312
4313 for (i = 0; i < inlen; i++)
4314 {
4315 struct ipa_parm_adjustment *n = &inner[i];
4316
4317 if (n->op == IPA_PARM_OP_REMOVE)
4318 adjustments.quick_push (*n);
4319 }
4320
4321 tmp.release ();
4322 return adjustments;
4323 }
4324
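/* A worked example of the composition (assuming, as asserted above, that
   neither vector creates brand new parameters): for an original parameter
   list (a, b, c), let INNER remove b, producing an intermediate clone
   with (a, c), and let OUTER remove the first parameter of that clone.
   The combined vector then describes the final clone (c) directly in
   terms of the original indices: a and b are removed and c is copied.  */
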
4325 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
4326 friendly way, assuming they are meant to be applied to FNDECL. */
4327
4328 void
4329 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4330 tree fndecl)
4331 {
4332 int i, len = adjustments.length ();
4333 bool first = true;
4334 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4335
4336 fprintf (file, "IPA param adjustments: ");
4337 for (i = 0; i < len; i++)
4338 {
4339 struct ipa_parm_adjustment *adj;
4340 adj = &adjustments[i];
4341
4342 if (!first)
4343 fprintf (file, " ");
4344 else
4345 first = false;
4346
4347 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4348 print_generic_expr (file, parms[adj->base_index], 0);
4349 if (adj->base)
4350 {
4351 fprintf (file, ", base: ");
4352 print_generic_expr (file, adj->base, 0);
4353 }
4354 if (adj->new_decl)
4355 {
4356 fprintf (file, ", new_decl: ");
4357 print_generic_expr (file, adj->new_decl, 0);
4358 }
4359 if (adj->new_ssa_base)
4360 {
4361 fprintf (file, ", new_ssa_base: ");
4362 print_generic_expr (file, adj->new_ssa_base, 0);
4363 }
4364
4365 if (adj->op == IPA_PARM_OP_COPY)
4366 fprintf (file, ", copy_param");
4367 else if (adj->op == IPA_PARM_OP_REMOVE)
4368 fprintf (file, ", remove_param");
4369 else
4370 fprintf (file, ", offset %li", (long) adj->offset);
4371 if (adj->by_ref)
4372 fprintf (file, ", by_ref");
4373 print_node_brief (file, ", type: ", adj->type, 0);
4374 fprintf (file, "\n");
4375 }
4376 parms.release ();
4377 }
4378
4379 /* Dump the linked list of aggregate replacement values AV to F. */
4380
4381 void
4382 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4383 {
4384 bool comma = false;
4385 fprintf (f, " Aggregate replacements:");
4386 for (; av; av = av->next)
4387 {
4388 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4389 av->index, av->offset);
4390 print_generic_expr (f, av->value, 0);
4391 comma = true;
4392 }
4393 fprintf (f, "\n");
4394 }
4395
4396 /* Stream out jump function JUMP_FUNC to OB. */
4397
4398 static void
4399 ipa_write_jump_function (struct output_block *ob,
4400 struct ipa_jump_func *jump_func)
4401 {
4402 struct ipa_agg_jf_item *item;
4403 struct bitpack_d bp;
4404 int i, count;
4405
4406 streamer_write_uhwi (ob, jump_func->type);
4407 switch (jump_func->type)
4408 {
4409 case IPA_JF_UNKNOWN:
4410 break;
4411 case IPA_JF_CONST:
4412 gcc_assert (
4413 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4414 stream_write_tree (ob, jump_func->value.constant.value, true);
4415 break;
4416 case IPA_JF_PASS_THROUGH:
4417 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4418 if (jump_func->value.pass_through.operation == NOP_EXPR)
4419 {
4420 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4421 bp = bitpack_create (ob->main_stream);
4422 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4423 streamer_write_bitpack (&bp);
4424 }
4425 else
4426 {
4427 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4428 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4429 }
4430 break;
4431 case IPA_JF_ANCESTOR:
4432 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4433 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4434 bp = bitpack_create (ob->main_stream);
4435 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4436 streamer_write_bitpack (&bp);
4437 break;
4438 }
4439
4440 count = vec_safe_length (jump_func->agg.items);
4441 streamer_write_uhwi (ob, count);
4442 if (count)
4443 {
4444 bp = bitpack_create (ob->main_stream);
4445 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4446 streamer_write_bitpack (&bp);
4447 }
4448
4449 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4450 {
4451 streamer_write_uhwi (ob, item->offset);
4452 stream_write_tree (ob, item->value, true);
4453 }
4454
4455 bp = bitpack_create (ob->main_stream);
4456 bp_pack_value (&bp, jump_func->alignment.known, 1);
4457 streamer_write_bitpack (&bp);
4458 if (jump_func->alignment.known)
4459 {
4460 streamer_write_uhwi (ob, jump_func->alignment.align);
4461 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4462 }
4463 }
4464
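/* For reference, the record emitted above is, in stream order: the jump
   function type; a type-specific payload (the constant tree for
   IPA_JF_CONST; for IPA_JF_PASS_THROUGH the operation followed either by
   formal_id and an agg_preserved bit for NOP_EXPR or by the operand tree
   and formal_id otherwise; offset, formal_id and an agg_preserved bit for
   IPA_JF_ANCESTOR); the number of aggregate items, a by_ref bit when that
   number is nonzero, and the items themselves as (offset, value) pairs;
   and finally an alignment-known bit, followed by align and misalign when
   set.  ipa_read_jump_function below must consume these fields in exactly
   this order.  */
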
4465 /* Read in jump function JUMP_FUNC from IB. */
4466
4467 static void
4468 ipa_read_jump_function (struct lto_input_block *ib,
4469 struct ipa_jump_func *jump_func,
4470 struct cgraph_edge *cs,
4471 struct data_in *data_in)
4472 {
4473 enum jump_func_type jftype;
4474 enum tree_code operation;
4475 int i, count;
4476
4477 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4478 switch (jftype)
4479 {
4480 case IPA_JF_UNKNOWN:
4481 ipa_set_jf_unknown (jump_func);
4482 break;
4483 case IPA_JF_CONST:
4484 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4485 break;
4486 case IPA_JF_PASS_THROUGH:
4487 operation = (enum tree_code) streamer_read_uhwi (ib);
4488 if (operation == NOP_EXPR)
4489 {
4490 int formal_id = streamer_read_uhwi (ib);
4491 struct bitpack_d bp = streamer_read_bitpack (ib);
4492 bool agg_preserved = bp_unpack_value (&bp, 1);
4493 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4494 }
4495 else
4496 {
4497 tree operand = stream_read_tree (ib, data_in);
4498 int formal_id = streamer_read_uhwi (ib);
4499 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4500 operation);
4501 }
4502 break;
4503 case IPA_JF_ANCESTOR:
4504 {
4505 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4506 int formal_id = streamer_read_uhwi (ib);
4507 struct bitpack_d bp = streamer_read_bitpack (ib);
4508 bool agg_preserved = bp_unpack_value (&bp, 1);
4509 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4510 break;
4511 }
4512 }
4513
4514 count = streamer_read_uhwi (ib);
4515 vec_alloc (jump_func->agg.items, count);
4516 if (count)
4517 {
4518 struct bitpack_d bp = streamer_read_bitpack (ib);
4519 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4520 }
4521 for (i = 0; i < count; i++)
4522 {
4523 struct ipa_agg_jf_item item;
4524 item.offset = streamer_read_uhwi (ib);
4525 item.value = stream_read_tree (ib, data_in);
4526 jump_func->agg.items->quick_push (item);
4527 }
4528
4529 struct bitpack_d bp = streamer_read_bitpack (ib);
4530 bool alignment_known = bp_unpack_value (&bp, 1);
4531 if (alignment_known)
4532 {
4533 jump_func->alignment.known = true;
4534 jump_func->alignment.align = streamer_read_uhwi (ib);
4535 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4536 }
4537 else
4538 jump_func->alignment.known = false;
4539 }
4540
4541 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4542 relevant to indirect inlining to OB. */
4543
4544 static void
4545 ipa_write_indirect_edge_info (struct output_block *ob,
4546 struct cgraph_edge *cs)
4547 {
4548 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4549 struct bitpack_d bp;
4550
4551 streamer_write_hwi (ob, ii->param_index);
4552 bp = bitpack_create (ob->main_stream);
4553 bp_pack_value (&bp, ii->polymorphic, 1);
4554 bp_pack_value (&bp, ii->agg_contents, 1);
4555 bp_pack_value (&bp, ii->member_ptr, 1);
4556 bp_pack_value (&bp, ii->by_ref, 1);
4557 bp_pack_value (&bp, ii->vptr_changed, 1);
4558 streamer_write_bitpack (&bp);
4559 if (ii->agg_contents || ii->polymorphic)
4560 streamer_write_hwi (ob, ii->offset);
4561 else
4562 gcc_assert (ii->offset == 0);
4563
4564 if (ii->polymorphic)
4565 {
4566 streamer_write_hwi (ob, ii->otr_token);
4567 stream_write_tree (ob, ii->otr_type, true);
4568 ii->context.stream_out (ob);
4569 }
4570 }
4571
4572 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4573 relevant to indirect inlining from IB. */
4574
4575 static void
4576 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4577 struct data_in *data_in,
4578 struct cgraph_edge *cs)
4579 {
4580 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4581 struct bitpack_d bp;
4582
4583 ii->param_index = (int) streamer_read_hwi (ib);
4584 bp = streamer_read_bitpack (ib);
4585 ii->polymorphic = bp_unpack_value (&bp, 1);
4586 ii->agg_contents = bp_unpack_value (&bp, 1);
4587 ii->member_ptr = bp_unpack_value (&bp, 1);
4588 ii->by_ref = bp_unpack_value (&bp, 1);
4589 ii->vptr_changed = bp_unpack_value (&bp, 1);
4590 if (ii->agg_contents || ii->polymorphic)
4591 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4592 else
4593 ii->offset = 0;
4594 if (ii->polymorphic)
4595 {
4596 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4597 ii->otr_type = stream_read_tree (ib, data_in);
4598 ii->context.stream_in (ib, data_in);
4599 }
4600 }
4601
4602 /* Stream out NODE info to OB. */
4603
4604 static void
4605 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4606 {
4607 int node_ref;
4608 lto_symtab_encoder_t encoder;
4609 struct ipa_node_params *info = IPA_NODE_REF (node);
4610 int j;
4611 struct cgraph_edge *e;
4612 struct bitpack_d bp;
4613
4614 encoder = ob->decl_state->symtab_node_encoder;
4615 node_ref = lto_symtab_encoder_encode (encoder, node);
4616 streamer_write_uhwi (ob, node_ref);
4617
4618 streamer_write_uhwi (ob, ipa_get_param_count (info));
4619 for (j = 0; j < ipa_get_param_count (info); j++)
4620 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4621 bp = bitpack_create (ob->main_stream);
4622 gcc_assert (info->analysis_done
4623 || ipa_get_param_count (info) == 0);
4624 gcc_assert (!info->node_enqueued);
4625 gcc_assert (!info->ipcp_orig_node);
4626 for (j = 0; j < ipa_get_param_count (info); j++)
4627 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4628 streamer_write_bitpack (&bp);
4629 for (j = 0; j < ipa_get_param_count (info); j++)
4630 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4631 for (e = node->callees; e; e = e->next_callee)
4632 {
4633 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4634
4635 streamer_write_uhwi (ob,
4636 ipa_get_cs_argument_count (args) * 2
4637 + (args->polymorphic_call_contexts != NULL));
4638 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4639 {
4640 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4641 if (args->polymorphic_call_contexts != NULL)
4642 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4643 }
4644 }
4645 for (e = node->indirect_calls; e; e = e->next_callee)
4646 {
4647 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4648
4649 streamer_write_uhwi (ob,
4650 ipa_get_cs_argument_count (args) * 2
4651 + (args->polymorphic_call_contexts != NULL));
4652 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4653 {
4654 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4655 if (args->polymorphic_call_contexts != NULL)
4656 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4657 }
4658 ipa_write_indirect_edge_info (ob, e);
4659 }
4660 }
4661
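/* Note the encoding used for each edge above: the argument count is
   streamed as count * 2 + 1 when polymorphic call contexts accompany the
   jump functions and as count * 2 when they do not; for example, three
   arguments with contexts are written as 7.  ipa_read_node_info below
   recovers both values with "contexts_computed = count & 1;
   count /= 2;".  */
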
4662 /* Stream in NODE info from IB. */
4663
4664 static void
4665 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4666 struct data_in *data_in)
4667 {
4668 struct ipa_node_params *info = IPA_NODE_REF (node);
4669 int k;
4670 struct cgraph_edge *e;
4671 struct bitpack_d bp;
4672
4673 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4674
4675 for (k = 0; k < ipa_get_param_count (info); k++)
4676 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4677
4678 bp = streamer_read_bitpack (ib);
4679 if (ipa_get_param_count (info) != 0)
4680 info->analysis_done = true;
4681 info->node_enqueued = false;
4682 for (k = 0; k < ipa_get_param_count (info); k++)
4683 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4684 for (k = 0; k < ipa_get_param_count (info); k++)
4685 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4686 for (e = node->callees; e; e = e->next_callee)
4687 {
4688 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4689 int count = streamer_read_uhwi (ib);
4690 bool contexts_computed = count & 1;
4691 count /= 2;
4692
4693 if (!count)
4694 continue;
4695 vec_safe_grow_cleared (args->jump_functions, count);
4696 if (contexts_computed)
4697 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4698
4699 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4700 {
4701 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4702 data_in);
4703 if (contexts_computed)
4704 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4705 }
4706 }
4707 for (e = node->indirect_calls; e; e = e->next_callee)
4708 {
4709 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4710 int count = streamer_read_uhwi (ib);
4711 bool contexts_computed = count & 1;
4712 count /= 2;
4713
4714 if (count)
4715 {
4716 vec_safe_grow_cleared (args->jump_functions, count);
4717 if (contexts_computed)
4718 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4719 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4720 {
4721 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4722 data_in);
4723 if (contexts_computed)
4724 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4725 }
4726 }
4727 ipa_read_indirect_edge_info (ib, data_in, e);
4728 }
4729 }
4730
4731 /* Write jump functions of all analyzed functions in the current partition. */
4732
4733 void
4734 ipa_prop_write_jump_functions (void)
4735 {
4736 struct cgraph_node *node;
4737 struct output_block *ob;
4738 unsigned int count = 0;
4739 lto_symtab_encoder_iterator lsei;
4740 lto_symtab_encoder_t encoder;
4741
4742 if (!ipa_node_params_sum)
4743 return;
4744
4745 ob = create_output_block (LTO_section_jump_functions);
4746 encoder = ob->decl_state->symtab_node_encoder;
4747 ob->symbol = NULL;
4748 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4749 lsei_next_function_in_partition (&lsei))
4750 {
4751 node = lsei_cgraph_node (lsei);
4752 if (node->has_gimple_body_p ()
4753 && IPA_NODE_REF (node) != NULL)
4754 count++;
4755 }
4756
4757 streamer_write_uhwi (ob, count);
4758
4759 /* Process all of the functions. */
4760 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4761 lsei_next_function_in_partition (&lsei))
4762 {
4763 node = lsei_cgraph_node (lsei);
4764 if (node->has_gimple_body_p ()
4765 && IPA_NODE_REF (node) != NULL)
4766 ipa_write_node_info (ob, node);
4767 }
4768 streamer_write_char_stream (ob->main_stream, 0);
4769 produce_asm (ob, NULL);
4770 destroy_output_block (ob);
4771 }
4772
4773 /* Read section in file FILE_DATA of length LEN with data DATA. */
4774
4775 static void
4776 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4777 size_t len)
4778 {
4779 const struct lto_function_header *header =
4780 (const struct lto_function_header *) data;
4781 const int cfg_offset = sizeof (struct lto_function_header);
4782 const int main_offset = cfg_offset + header->cfg_size;
4783 const int string_offset = main_offset + header->main_size;
4784 struct data_in *data_in;
4785 unsigned int i;
4786 unsigned int count;
4787
4788 lto_input_block ib_main ((const char *) data + main_offset,
4789 header->main_size, file_data->mode_table);
4790
4791 data_in =
4792 lto_data_in_create (file_data, (const char *) data + string_offset,
4793 header->string_size, vNULL);
4794 count = streamer_read_uhwi (&ib_main);
4795
4796 for (i = 0; i < count; i++)
4797 {
4798 unsigned int index;
4799 struct cgraph_node *node;
4800 lto_symtab_encoder_t encoder;
4801
4802 index = streamer_read_uhwi (&ib_main);
4803 encoder = file_data->symtab_node_encoder;
4804 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4805 index));
4806 gcc_assert (node->definition);
4807 ipa_read_node_info (&ib_main, node, data_in);
4808 }
4809 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4810 len);
4811 lto_data_in_delete (data_in);
4812 }
4813
4814 /* Read ipcp jump functions. */
4815
4816 void
4817 ipa_prop_read_jump_functions (void)
4818 {
4819 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4820 struct lto_file_decl_data *file_data;
4821 unsigned int j = 0;
4822
4823 ipa_check_create_node_params ();
4824 ipa_check_create_edge_args ();
4825 ipa_register_cgraph_hooks ();
4826
4827 while ((file_data = file_data_vec[j++]))
4828 {
4829 size_t len;
4830 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4831
4832 if (data)
4833 ipa_prop_read_section (file_data, data, len);
4834 }
4835 }
4836
4837 /* After merging units, we can get a mismatch in argument counts.
4838 Decl merging might also have rendered parameter lists obsolete.
4839 Also compute called_with_variable_arg info. */
4840
4841 void
4842 ipa_update_after_lto_read (void)
4843 {
4844 ipa_check_create_node_params ();
4845 ipa_check_create_edge_args ();
4846 }
4847
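/* Stream out the IPA-CP transformation summary for NODE to OB: the chain
   of aggregate value replacements followed by the known parameter
   alignments (or a zero count when there are none).  */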
4848 void
4849 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4850 {
4851 int node_ref;
4852 unsigned int count = 0;
4853 lto_symtab_encoder_t encoder;
4854 struct ipa_agg_replacement_value *aggvals, *av;
4855
4856 aggvals = ipa_get_agg_replacements_for_node (node);
4857 encoder = ob->decl_state->symtab_node_encoder;
4858 node_ref = lto_symtab_encoder_encode (encoder, node);
4859 streamer_write_uhwi (ob, node_ref);
4860
4861 for (av = aggvals; av; av = av->next)
4862 count++;
4863 streamer_write_uhwi (ob, count);
4864
4865 for (av = aggvals; av; av = av->next)
4866 {
4867 struct bitpack_d bp;
4868
4869 streamer_write_uhwi (ob, av->offset);
4870 streamer_write_uhwi (ob, av->index);
4871 stream_write_tree (ob, av->value, true);
4872
4873 bp = bitpack_create (ob->main_stream);
4874 bp_pack_value (&bp, av->by_ref, 1);
4875 streamer_write_bitpack (&bp);
4876 }
4877
4878 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4879 if (ts && vec_safe_length (ts->alignments) > 0)
4880 {
4881 count = ts->alignments->length ();
4882
4883 streamer_write_uhwi (ob, count);
4884 for (unsigned i = 0; i < count; ++i)
4885 {
4886 ipa_alignment *parm_al = &(*ts->alignments)[i];
4887
4888 struct bitpack_d bp;
4889 bp = bitpack_create (ob->main_stream);
4890 bp_pack_value (&bp, parm_al->known, 1);
4891 streamer_write_bitpack (&bp);
4892 if (parm_al->known)
4893 {
4894 streamer_write_uhwi (ob, parm_al->align);
4895 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4896 parm_al->misalign);
4897 }
4898 }
4899 }
4900 else
4901 streamer_write_uhwi (ob, 0);
4902 }
4903
4904 /* Stream in the aggregate value replacement chain and parameter
     alignments for NODE from IB. */
4905
4906 static void
4907 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4908 data_in *data_in)
4909 {
4910 struct ipa_agg_replacement_value *aggvals = NULL;
4911 unsigned int count, i;
4912
4913 count = streamer_read_uhwi (ib);
4914 for (i = 0; i < count; i++)
4915 {
4916 struct ipa_agg_replacement_value *av;
4917 struct bitpack_d bp;
4918
4919 av = ggc_alloc<ipa_agg_replacement_value> ();
4920 av->offset = streamer_read_uhwi (ib);
4921 av->index = streamer_read_uhwi (ib);
4922 av->value = stream_read_tree (ib, data_in);
4923 bp = streamer_read_bitpack (ib);
4924 av->by_ref = bp_unpack_value (&bp, 1);
4925 av->next = aggvals;
4926 aggvals = av;
4927 }
4928 ipa_set_node_agg_value_chain (node, aggvals);
4929
4930 count = streamer_read_uhwi (ib);
4931 if (count > 0)
4932 {
4933 ipcp_grow_transformations_if_necessary ();
4934
4935 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4936 vec_safe_grow_cleared (ts->alignments, count);
4937
4938 for (i = 0; i < count; i++)
4939 {
4940 ipa_alignment *parm_al;
4941 parm_al = &(*ts->alignments)[i];
4942 struct bitpack_d bp;
4943 bp = streamer_read_bitpack (ib);
4944 parm_al->known = bp_unpack_value (&bp, 1);
4945 if (parm_al->known)
4946 {
4947 parm_al->align = streamer_read_uhwi (ib);
4948 parm_al->misalign
4949 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
4950 0, parm_al->align);
4951 }
4952 }
4953 }
4954 }
4955
4956 /* Write the IPA-CP transformation summaries (aggregate replacements and
     parameter alignments) of all functions in the current partition. */
4957
4958 void
4959 ipcp_write_transformation_summaries (void)
4960 {
4961 struct cgraph_node *node;
4962 struct output_block *ob;
4963 unsigned int count = 0;
4964 lto_symtab_encoder_iterator lsei;
4965 lto_symtab_encoder_t encoder;
4966
4967 ob = create_output_block (LTO_section_ipcp_transform);
4968 encoder = ob->decl_state->symtab_node_encoder;
4969 ob->symbol = NULL;
4970 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4971 lsei_next_function_in_partition (&lsei))
4972 {
4973 node = lsei_cgraph_node (lsei);
4974 if (node->has_gimple_body_p ())
4975 count++;
4976 }
4977
4978 streamer_write_uhwi (ob, count);
4979
4980 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4981 lsei_next_function_in_partition (&lsei))
4982 {
4983 node = lsei_cgraph_node (lsei);
4984 if (node->has_gimple_body_p ())
4985 write_ipcp_transformation_info (ob, node);
4986 }
4987 streamer_write_char_stream (ob->main_stream, 0);
4988 produce_asm (ob, NULL);
4989 destroy_output_block (ob);
4990 }
4991
4992 /* Read replacements section in file FILE_DATA of length LEN with data
4993 DATA. */
4994
4995 static void
4996 read_replacements_section (struct lto_file_decl_data *file_data,
4997 const char *data,
4998 size_t len)
4999 {
5000 const struct lto_function_header *header =
5001 (const struct lto_function_header *) data;
5002 const int cfg_offset = sizeof (struct lto_function_header);
5003 const int main_offset = cfg_offset + header->cfg_size;
5004 const int string_offset = main_offset + header->main_size;
5005 struct data_in *data_in;
5006 unsigned int i;
5007 unsigned int count;
5008
5009 lto_input_block ib_main ((const char *) data + main_offset,
5010 header->main_size, file_data->mode_table);
5011
5012 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5013 header->string_size, vNULL);
5014 count = streamer_read_uhwi (&ib_main);
5015
5016 for (i = 0; i < count; i++)
5017 {
5018 unsigned int index;
5019 struct cgraph_node *node;
5020 lto_symtab_encoder_t encoder;
5021
5022 index = streamer_read_uhwi (&ib_main);
5023 encoder = file_data->symtab_node_encoder;
5024 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5025 index));
5026 gcc_assert (node->definition);
5027 read_ipcp_transformation_info (&ib_main, node, data_in);
5028 }
5029 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5030 len);
5031 lto_data_in_delete (data_in);
5032 }
5033
5034 /* Read IPA-CP transformation summaries (aggregate replacements and
     parameter alignments). */
5035
5036 void
5037 ipcp_read_transformation_summaries (void)
5038 {
5039 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5040 struct lto_file_decl_data *file_data;
5041 unsigned int j = 0;
5042
5043 while ((file_data = file_data_vec[j++]))
5044 {
5045 size_t len;
5046 const char *data = lto_get_section_data (file_data,
5047 LTO_section_ipcp_transform,
5048 NULL, &len);
5049 if (data)
5050 read_replacements_section (file_data, data, len);
5051 }
5052 }
5053
5054 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5055 NODE. */
5056
5057 static void
5058 adjust_agg_replacement_values (struct cgraph_node *node,
5059 struct ipa_agg_replacement_value *aggval)
5060 {
5061 struct ipa_agg_replacement_value *v;
5062 int i, c = 0, d = 0, *adj;
5063
5064 if (!node->clone.combined_args_to_skip)
5065 return;
5066
5067 for (v = aggval; v; v = v->next)
5068 {
5069 gcc_assert (v->index >= 0);
5070 if (c < v->index)
5071 c = v->index;
5072 }
5073 c++;
5074
5075 adj = XALLOCAVEC (int, c);
5076 for (i = 0; i < c; i++)
5077 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5078 {
5079 adj[i] = -1;
5080 d++;
5081 }
5082 else
5083 adj[i] = i - d;
5084
5085 for (v = aggval; v; v = v->next)
5086 v->index = adj[v->index];
5087 }
5088
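/* A small worked example (hypothetical): if replacements were recorded
   for original parameters up to index 3 and combined_args_to_skip
   contains bit 1, the mapping computed above is adj = { 0, -1, 1, 2 };
   a replacement for original parameter 3 is thus renumbered to index 2
   in the clone.  */
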
5089 /* Dominator walker driving the ipcp modification phase. */
5090
5091 class ipcp_modif_dom_walker : public dom_walker
5092 {
5093 public:
5094 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5095 vec<ipa_param_descriptor> descs,
5096 struct ipa_agg_replacement_value *av,
5097 bool *sc, bool *cc)
5098 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5099 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5100
5101 virtual void before_dom_children (basic_block);
5102
5103 private:
5104 struct ipa_func_body_info *m_fbi;
5105 vec<ipa_param_descriptor> m_descriptors;
5106 struct ipa_agg_replacement_value *m_aggval;
5107 bool *m_something_changed, *m_cfg_changed;
5108 };
5109
5110 void
5111 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5112 {
5113 gimple_stmt_iterator gsi;
5114 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5115 {
5116 struct ipa_agg_replacement_value *v;
5117 gimple *stmt = gsi_stmt (gsi);
5118 tree rhs, val, t;
5119 HOST_WIDE_INT offset, size;
5120 int index;
5121 bool by_ref, vce;
5122
5123 if (!gimple_assign_load_p (stmt))
5124 continue;
5125 rhs = gimple_assign_rhs1 (stmt);
5126 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5127 continue;
5128
5129 vce = false;
5130 t = rhs;
5131 while (handled_component_p (t))
5132 {
5133 /* V_C_E can do things like convert an array of integers to one
5134 bigger integer, and similar things that we do not handle below. */
5135 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5136 {
5137 vce = true;
5138 break;
5139 }
5140 t = TREE_OPERAND (t, 0);
5141 }
5142 if (vce)
5143 continue;
5144
5145 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5146 &offset, &size, &by_ref))
5147 continue;
5148 for (v = m_aggval; v; v = v->next)
5149 if (v->index == index
5150 && v->offset == offset)
5151 break;
5152 if (!v
5153 || v->by_ref != by_ref
5154 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5155 continue;
5156
5157 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5158 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5159 {
5160 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5161 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5162 else if (TYPE_SIZE (TREE_TYPE (rhs))
5163 == TYPE_SIZE (TREE_TYPE (v->value)))
5164 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5165 else
5166 {
5167 if (dump_file)
5168 {
5169 fprintf (dump_file, " const ");
5170 print_generic_expr (dump_file, v->value, 0);
5171 fprintf (dump_file, " can't be converted to type of ");
5172 print_generic_expr (dump_file, rhs, 0);
5173 fprintf (dump_file, "\n");
5174 }
5175 continue;
5176 }
5177 }
5178 else
5179 val = v->value;
5180
5181 if (dump_file && (dump_flags & TDF_DETAILS))
5182 {
5183 fprintf (dump_file, "Modifying stmt:\n ");
5184 print_gimple_stmt (dump_file, stmt, 0, 0);
5185 }
5186 gimple_assign_set_rhs_from_tree (&gsi, val);
5187 update_stmt (stmt);
5188
5189 if (dump_file && (dump_flags & TDF_DETAILS))
5190 {
5191 fprintf (dump_file, "into:\n ");
5192 print_gimple_stmt (dump_file, stmt, 0, 0);
5193 fprintf (dump_file, "\n");
5194 }
5195
5196 *m_something_changed = true;
5197 if (maybe_clean_eh_stmt (stmt)
5198 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5199 *m_cfg_changed = true;
5200 }
5202 }
5203
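/* An assumed example of the walk above: if the summary records that the
   aggregate pointed to by parameter 0 holds the constant 42 at byte
   offset 8 (by_ref set), a dominated load such as x_1 = p_2(D)->f, where
   f lives at offset 8 and has matching size, is rewritten into x_1 = 42,
   and any EH edges made dead by the folding are purged.  */
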
5204 /* Update alignment of formal parameters as described in
5205 ipcp_transformation_summary. */
5206
5207 static void
5208 ipcp_update_alignments (struct cgraph_node *node)
5209 {
5210 tree fndecl = node->decl;
5211 tree parm = DECL_ARGUMENTS (fndecl);
5212 tree next_parm = parm;
5213 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5214 if (!ts || vec_safe_length (ts->alignments) == 0)
5215 return;
5216 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5217 unsigned count = alignments.length ();
5218
5219 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5220 {
5221 if (node->clone.combined_args_to_skip
5222 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5223 continue;
5224 gcc_checking_assert (parm);
5225 next_parm = DECL_CHAIN (parm);
5226
5227 if (!alignments[i].known || !is_gimple_reg (parm))
5228 continue;
5229 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5230 if (!ddef)
5231 continue;
5232
5233 if (dump_file)
5234 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5235 "misalignment to %u\n", i, alignments[i].align,
5236 alignments[i].misalign);
5237
5238 struct ptr_info_def *pi = get_ptr_info (ddef);
5239 gcc_checking_assert (pi);
5240 unsigned old_align;
5241 unsigned old_misalign;
5242 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5243
5244 if (old_known
5245 && old_align >= alignments[i].align)
5246 {
5247 if (dump_file)
5248 fprintf (dump_file, " But the alignment was already %u.\n",
5249 old_align);
5250 continue;
5251 }
5252 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5253 }
5254 }
5255
5256 /* IPCP transformation phase doing propagation of aggregate values. */
5257
5258 unsigned int
5259 ipcp_transform_function (struct cgraph_node *node)
5260 {
5261 vec<ipa_param_descriptor> descriptors = vNULL;
5262 struct ipa_func_body_info fbi;
5263 struct ipa_agg_replacement_value *aggval;
5264 int param_count;
5265 bool cfg_changed = false, something_changed = false;
5266
5267 gcc_checking_assert (cfun);
5268 gcc_checking_assert (current_function_decl);
5269
5270 if (dump_file)
5271 fprintf (dump_file, "Modification phase of node %s/%i\n",
5272 node->name (), node->order);
5273
5274 ipcp_update_alignments (node);
5275 aggval = ipa_get_agg_replacements_for_node (node);
5276 if (!aggval)
5277 return 0;
5278 param_count = count_formal_params (node->decl);
5279 if (param_count == 0)
5280 return 0;
5281 adjust_agg_replacement_values (node, aggval);
5282 if (dump_file)
5283 ipa_dump_agg_replacement_values (dump_file, aggval);
5284
5285 fbi.node = node;
5286 fbi.info = NULL;
5287 fbi.bb_infos = vNULL;
5288 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5289 fbi.param_count = param_count;
5290 fbi.aa_walked = 0;
5291
5292 descriptors.safe_grow_cleared (param_count);
5293 ipa_populate_param_decls (node, descriptors);
5294 calculate_dominance_info (CDI_DOMINATORS);
5295 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5296 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5297
5298 int i;
5299 struct ipa_bb_info *bi;
5300 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5301 free_ipa_bb_info (bi);
5302 fbi.bb_infos.release ();
5303 free_dominance_info (CDI_DOMINATORS);
5304 (*ipcp_transformations)[node->uid].agg_values = NULL;
5305 (*ipcp_transformations)[node->uid].alignments = NULL;
5306 descriptors.release ();
5307
5308 if (!something_changed)
5309 return 0;
5310 else if (cfg_changed)
5311 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5312 else
5313 return TODO_update_ssa_only_virtuals;
5314 }