/* Callgraph transformations to handle inlining
   Copyright (C) 2003, 2004, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The inline decisions are stored in the callgraph as an "inline plan" and
   applied later.

   To mark a given call inline, use the inline_call function.
   The function marks the edge inlinable and, if necessary, produces a
   virtual clone in the callgraph representing the new copy of the callee's
   function body.

   The inline plan is applied to a given function body by inline_transform.  */

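/* A minimal sketch of how the two entry points fit together, assuming the
   usual flow of the inlining heuristics (the caller names and variables
   below are illustrative only, not part of this file):

     if (heuristics_accept_edge_p (e))          // hypothetical predicate
       inline_call (e, true, &new_edges, &overall_size);
     ...
     todo |= inline_transform (node);           // later, per function body

   inline_call only records the decision in the callgraph; the actual
   bodies are rewritten when inline_transform runs on the caller.  */
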
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "langhooks.h"
#include "cgraph.h"
#include "timevar.h"
#include "output.h"
#include "intl.h"
#include "coverage.h"
#include "ggc.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"

int ncalls_inlined;
int nfunctions_inlined;

/* Scale frequency of NODE edges by FREQ_SCALE and increase loop nest
   by NEST.  */

static void
update_noncloned_frequencies (struct cgraph_node *node,
                              int freq_scale, int nest)
{
  struct cgraph_edge *e;

  /* We do not want to ignore high loop nest after freq drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
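  /* Every outgoing call site of NODE picks up the extra loop nest and has
     its frequency rescaled relative to CGRAPH_FREQ_BASE (capped at
     CGRAPH_FREQ_MAX); already-inlined edges propagate the scaling into
     the callee bodies as well.  */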
  for (e = node->callees; e; e = e->next_callee)
    {
      e->loop_nest += nest;
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
        e->frequency = CGRAPH_FREQ_MAX;
      if (!e->inline_failed)
        update_noncloned_frequencies (e->callee, freq_scale, nest);
    }
}


/* E is expected to be an edge being inlined.  Clone the destination node of
   the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating new
   clones or re-using the node originally representing the out-of-line
   function call.  */

void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
                     bool update_original, int *overall_size)
{
  HOST_WIDE_INT peak;
  struct inline_summary *caller_info, *callee_info;

  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
         In that case just go ahead and re-use it.  This is not just a
         memory optimization.  Making the offline copy of the function
         disappear from the program will improve future decisions on
         inlining.  */
      if (!e->callee->callers->next_caller
          /* Recursive inlining never wants the master clone to
             be overwritten.  */
          && update_original
          /* FIXME: When the address of a DECL_EXTERNAL function is taken, we
             can still remove its offline copy, but we would need to keep an
             unanalyzed node in the callgraph so references can point to it.  */
          && !e->callee->address_taken
          && cgraph_can_remove_if_no_direct_calls_p (e->callee)
          /* Inlining might enable more devirtualization, so we want to remove
             those only after all devirtualizable virtual calls are processed.
             Lacking may-edges in the callgraph, we just preserve them after
             inlining.  */
          && (!DECL_VIRTUAL_P (e->callee->decl)
              || (!DECL_COMDAT (e->callee->decl)
                  && !DECL_EXTERNAL (e->callee->decl)))
          /* Don't reuse if more than one function shares a comdat group.
             If the other function(s) are needed, we need to emit even
             this function out of line.  */
          && !e->callee->same_comdat_group
          /* During early inlining some unanalyzed cgraph nodes might be in
             the callgraph and they might refer to the function in question.  */
          && !cgraph_new_nodes)
        {
          gcc_assert (!e->callee->global.inlined_to);
          if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
            {
              if (overall_size)
                *overall_size -= inline_summary (e->callee)->size;
              nfunctions_inlined++;
            }
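          /* The existing node itself now serves as the inline copy: it is
             no longer emitted as a separate function, so hide it and
             rescale its outgoing edge frequencies to this call site.  */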
          duplicate = false;
          e->callee->local.externally_visible = false;
          update_noncloned_frequencies (e->callee, e->frequency, e->loop_nest);
        }
      else
        {
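          /* Otherwise the original body must stay around; create a fresh
             inline clone of the callee and redirect the edge to it.  */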
          struct cgraph_node *n;
          n = cgraph_clone_node (e->callee, e->callee->decl,
                                 e->count, e->frequency, e->loop_nest,
                                 update_original, NULL);
          cgraph_redirect_edge_callee (e, n);
        }
    }

  callee_info = inline_summary (e->callee);
  caller_info = inline_summary (e->caller);

  if (e->caller->global.inlined_to)
    e->callee->global.inlined_to = e->caller->global.inlined_to;
  else
    e->callee->global.inlined_to = e->caller;
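  /* The inlined callee's stack is laid out after the caller's own frame in
     the combined function; track the deepest offset reached so the root of
     the inline tree knows its total estimated stack size.  */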
  callee_info->stack_frame_offset
    = caller_info->stack_frame_offset
      + caller_info->estimated_self_stack_size;
  peak = callee_info->stack_frame_offset
    + callee_info->estimated_self_stack_size;
  if (inline_summary (e->callee->global.inlined_to)->estimated_stack_size
      < peak)
    inline_summary (e->callee->global.inlined_to)->estimated_stack_size = peak;
  cgraph_propagate_frequency (e->callee);

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      clone_inlined_nodes (e, duplicate, update_original, overall_size);
}


/* Mark edge E as inlined and update the callgraph accordingly.
   UPDATE_ORIGINAL specifies whether the profile of the original function
   should be updated.  If any new indirect edges are discovered in the
   process, add them to NEW_EDGES, unless it is NULL.  Return true iff any
   new callgraph edges were discovered as a result of inlining.  */

bool
inline_call (struct cgraph_edge *e, bool update_original,
             VEC (cgraph_edge_p, heap) **new_edges,
             int *overall_size)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  struct inline_summary *info;

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining an inline clone.  */
  gcc_assert (!e->callee->global.inlined_to);

  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (e->callee->decl) = true;

  clone_inlined_nodes (e, true, update_original, overall_size);

  /* Now update the size of the caller and of all functions the caller is
     inlined into.  */
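  /* The loop walks from the freshly inlined edge up through callers that
     are themselves inline clones, so TO ends up being the real function
     into which E was ultimately inlined.  */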
  for (;e && !e->inline_failed; e = e->caller->callers)
    {
      to = e->caller;
      info = inline_summary (to);
      old_size = info->size;
      new_size = estimate_size_after_inlining (to, curr);
      info->size = new_size;
      info->time = estimate_time_after_inlining (to, curr);
    }
  gcc_assert (curr->callee->global.inlined_to == to);
  if (overall_size && new_size > old_size)
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  if (flag_indirect_inlining && optimize)
    return ipa_propagate_indirect_call_infos (curr, new_edges);
  else
    return false;
}


/* Copy the function body of NODE and redirect all inline clones to it.
   This is done before the inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.  */

static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
             cgraph_node_name (node));

  gcc_assert (node == cgraph_get_node (node->decl));

  /* first_clone will be turned into a real function.  */
  first_clone = node->clones;
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_get_node (first_clone->decl));

  /* Now reshape the clone tree, so all other clones descend from
     first_clone.  */
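  /* The reshaping below detaches first_clone's siblings from NODE, makes
     them first_clone's children by prepending the sibling chain to
     first_clone's own clone list, and clears first_clone's sibling links.  */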
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now the node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share the decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
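  /* The walk below is a pre-order traversal of first_clone's clone tree:
     descend into a node's clones first, then move to the next sibling, and
     once a subtree is exhausted climb back via clone_of until a node with
     an unvisited sibling is found.  */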
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy NODE's function tree over to the new first_clone declaration.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
                            NULL, NULL);

  /* The function will be short-lived and removed after we inline all the
     clones, but make it internal so we won't confuse ourselves.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}


/* Apply the inline plan to the function.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e;
  bool inline_p = false;

  /* FIXME: Currently the pass manager is adding the inline transform more
     than once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (node->decl))
    save_inline_function_body (node);

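  /* Redirect each call statement to the callee chosen by the inline plan
     (possibly a clone with a different decl) and note whether any call was
     actually inlined, or -Winline diagnostics are requested, so that the
     tree inliner below has work to do.  */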
  for (e = node->callees; e; e = e->next_callee)
    {
      cgraph_redirect_edge_call_stmt_to_callee (e);
      if (!e->inline_failed || warn_inline)
        inline_p = true;
    }

  if (inline_p)
    {
      timevar_push (TV_INTEGRATION);
      todo = optimize_inline_calls (current_function_decl);
      timevar_pop (TV_INTEGRATION);
    }
  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  return todo | execute_fixup_cfg ();
}