optimize.c (inline_data): Remove fns_top.
[gcc.git] / gcc / cp / optimize.c
/* Perform optimizations on tree structure.

   Copyright (C) 1998, 1999 Free Software Foundation, Inc.
   Written by Mark Mitchell (mark@codesourcery.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "cp-tree.h"
#include "rtl.h"
#include "insn-config.h"
#include "integrate.h"
#include "varray.h"

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?

   o It looks like the return label is not being placed in the optimal
     place.  Shouldn't it come before the returned value?  */

/* Data required for function inlining.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain `h', followed
     by `g', followed by `f'.  */
  varray_type fns;
  /* The last SCOPE_STMT we have encountered.  */
  tree scope_stmt;
  /* The label to jump to when a return statement is encountered.  */
  tree ret_label;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  */
  int in_target_cleanup_p;
} inline_data;

/* Prototypes.  */

static tree initialize_inlined_parameters PROTO((inline_data *, tree));
static tree declare_return_variable PROTO((inline_data *, tree *));
static tree copy_body_r PROTO((tree *, int *, void *));
static tree copy_body PROTO((inline_data *));
static tree expand_call_inline PROTO((tree *, int *, void *));
static void expand_calls_inline PROTO((tree *, inline_data *));
static int inlinable_function_p PROTO((tree, inline_data *));
static tree remap_decl PROTO((tree, inline_data *));
static void remap_block PROTO((tree, tree, inline_data *));
static void copy_scope_stmt PROTO((tree *, int *, inline_data *));

/* Remap DECL during the copying of the BLOCK tree for the function.
   DATA is really an `inline_data *'.  */

static tree
remap_decl (decl, id)
     tree decl;
     inline_data *id;
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = VARRAY_TOP_TREE (id->fns);
  if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
    return NULL_TREE;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      tree t;

      /* Make a copy of the variable or label.  */
      t = copy_decl_for_inlining (decl, fn,
                                  VARRAY_TREE (id->fns, 0));
      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      n = splay_tree_insert (id->decl_map,
                             (splay_tree_key) decl,
                             (splay_tree_value) t);
    }

  return (tree) n->value;
}

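/* Editorial illustration (not part of the original source): only
   nonstatic locals of the function being inlined are remapped, so each
   inline expansion gets fresh automatic variables while statics remain
   shared.  Given

     inline int counter (void)
     {
       static int calls;
       int result;

       result = ++calls;
       return result;
     }

   every inlined copy of `counter' receives its own remapped `result',
   but `calls' is never remapped: all expansions (and any out-of-line
   calls) update the single static variable.  */
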
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
   remapped versions of the variables therein.  And hook the new block
   into the block-tree.  If non-NULL, the DECLS are declarations to
   use instead of the BLOCK_VARS in the old block.  */

static void
remap_block (scope_stmt, decls, id)
     tree scope_stmt;
     tree decls;
     inline_data *id;
{
  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
     not know whether or not expand_expr will actually write out the
     code we put there.  If it does not, then we'll have more BLOCKs
     than block-notes, and things will go awry.  At some point, we
     should make the back-end handle BLOCK notes in a tidier way,
     without requiring a strict correspondence to the block-tree; then
     this check can go.  */
  if (id->in_target_cleanup_p)
    {
      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
      return;
    }

  /* If this is the beginning of a scope, remap the associated BLOCK.  */
  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      tree old_block;
      tree new_block;
      tree old_var;

      /* Make the new block.  */
      old_block = SCOPE_STMT_BLOCK (scope_stmt);
      new_block = make_node (BLOCK);
      TREE_USED (new_block) = TREE_USED (old_block);
      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
      SCOPE_STMT_BLOCK (scope_stmt) = new_block;

      /* Remap its variables.  */
      for (old_var = decls ? decls : BLOCK_VARS (old_block);
           old_var;
           old_var = TREE_CHAIN (old_var))
        {
          tree new_var;

          /* Remap the variable.  */
          new_var = remap_decl (old_var, id);
          if (!new_var)
            /* We didn't remap this variable, so we can't mess with
               its TREE_CHAIN.  */
            ;
          else
            {
              TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
              BLOCK_VARS (new_block) = new_var;
            }
        }
      /* We put the BLOCK_VARS in reverse order; fix that now.  */
      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
      /* Graft the new block into the tree.  */
      insert_block_after_note (new_block,
                               (id->scope_stmt
                                ? SCOPE_STMT_BLOCK (id->scope_stmt)
                                : NULL_TREE),
                               (id->scope_stmt
                                ? SCOPE_BEGIN_P (id->scope_stmt) : 1),
                               VARRAY_TREE (id->fns, 0));
      /* Remember that this is now the last scope statement with
         an associated block.  */
      id->scope_stmt = scope_stmt;
      /* Remember the remapped block.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) old_block,
                         (splay_tree_value) new_block);
    }
  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
     remapped block.  */
  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      splay_tree_node n;

      /* Find this block in the table of remapped things.  */
      n = splay_tree_lookup (id->decl_map,
                             (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
      my_friendly_assert (n != NULL, 19991203);
      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;

      /* Remember that this is now the last scope statement with an
         associated block.  */
      id->scope_stmt = scope_stmt;
    }
}

/* Copy the SCOPE_STMT pointed to by TP.  */

static void
copy_scope_stmt (tp, walk_subtrees, id)
     tree *tp;
     int *walk_subtrees;
     inline_data *id;
{
  tree block;

  /* Remember whether or not this statement was nullified.  When
     making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
     doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
     deal with copying BLOCKs if they do not wish to do so.  */
  block = SCOPE_STMT_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  /* Restore the SCOPE_STMT_BLOCK.  */
  SCOPE_STMT_BLOCK (*tp) = block;

  /* Remap the associated block.  */
  remap_block (*tp, NULL_TREE, id);
}

/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data *id;
  tree fn;

  /* Set up.  */
  id = (inline_data *) data;
  fn = VARRAY_TOP_TREE (id->fns);

  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
                        19991113);

  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
  if (TREE_CODE (*tp) == RETURN_STMT)
    {
      tree return_stmt = *tp;
      tree goto_stmt;

      /* Build the GOTO_STMT.  */
      goto_stmt = build_min_nt (GOTO_STMT, id->ret_label);
      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original
         RESULT_DECL.  */
      if (RETURN_EXPR (return_stmt))
        {
          *tp = build_min_nt (EXPR_STMT,
                              RETURN_EXPR (return_stmt));
          /* And then jump to the end of the function.  */
          TREE_CHAIN (*tp) = goto_stmt;
        }
      /* If we're not returning anything just do the jump.  */
      else
        *tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  */
  else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      my_friendly_assert (new_decl != NULL_TREE, 19991203);
      /* Replace this variable with the copy.  */
      *tp = new_decl;
    }
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0));
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    my_friendly_abort (19991113);
  /* For a SCOPE_STMT, we must copy the associated block so that we
     can write out debugging information for the inlined variables.  */
  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
    copy_scope_stmt (tp, walk_subtrees, id);
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
      /* Similarly, if we're copying a CALL_EXPR, the RTL for the
         result is no longer valid.  */
      else if (TREE_CODE (*tp) == CALL_EXPR)
        CALL_EXPR_RTL (*tp) = NULL_RTX;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

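/* Editorial illustration (not part of the original source): the
   RETURN_STMT handling above rewrites each `return' in the copied body
   as an ordinary statement followed by a jump to RET_LABEL.  A body
   such as

     if (x)
       return 1;
     return 0;

   therefore behaves, once inlined, roughly like

     if (x)
       {
         retval = 1;
         goto ret_label;
       }
     retval = 0;
     goto ret_label;

   with `retval' standing in for the remapped RESULT_DECL (see
   declare_return_variable below) and `ret_label' for the
   compiler-generated RET_LABEL.  */
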
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (id)
     inline_data *id;
{
  tree body;

  body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
  walk_tree (&body, copy_body_r, id);

  return body;
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static tree
initialize_inlined_parameters (id, args)
     inline_data *id;
     tree args;
{
  tree fn;
  tree init_stmts;
  tree parms;
  tree a;
  tree p;

  /* Figure out what the parameters are.  */
  fn = VARRAY_TOP_TREE (id->fns);
  parms = DECL_ARGUMENTS (fn);

  /* Start with no initializations whatsoever.  */
  init_stmts = NULL_TREE;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
    {
      tree init_stmt;
      tree var;

      /* Make an equivalent VAR_DECL.  */
      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
         that way, when the PARM_DECL is encountered, it will be
         automatically replaced by the VAR_DECL.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) p,
                         (splay_tree_value) var);
      /* Initialize this VAR_DECL from the equivalent argument.  If
         the argument is an object, created via a constructor or copy,
         this will not result in an extra copy: the TARGET_EXPR
         representing the argument will be bound to VAR, and the
         object will be constructed in VAR.  */
      init_stmt = build_min_nt (EXPR_STMT,
                                build (INIT_EXPR, TREE_TYPE (p),
                                       var, TREE_VALUE (a)));
      /* Declare this new variable.  Note that we do this *after* the
         initialization because we are going to reverse all the
         initialization statements below.  */
      TREE_CHAIN (init_stmt) = build_min_nt (DECL_STMT, var);
      /* Add this initialization to the list.  */
      TREE_CHAIN (TREE_CHAIN (init_stmt)) = init_stmts;
      init_stmts = init_stmt;
    }

  /* The initialization statements have been built up in reverse
     order.  Straighten them out now.  */
  return nreverse (init_stmts);
}

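/* Editorial illustration (not part of the original source): for a call
   `f (a, b)' to `inline int f (int x, int y)', the list built above
   (after the final nreverse) behaves roughly like

     int x.1;     -- DECL_STMT for the copy of `x'
     x.1 = a;     -- EXPR_STMT wrapping the INIT_EXPR
     int y.2;
     y.2 = b;

   where `x.1' and `y.2' stand for the VAR_DECLs made by
   copy_decl_for_inlining.  Each PARM_DECL is also entered in
   DECL_MAP, so references to it in the copied body are replaced by
   the new variable.  */
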
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled in to contain a use of the declaration to
   indicate the return value of the function.  */

static tree
declare_return_variable (id, use_stmt)
     struct inline_data *id;
     tree *use_stmt;
{
  tree fn = VARRAY_TOP_TREE (id->fns);
  tree result = DECL_RESULT (fn);
  tree var;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result)),
                              void_type_node))
    {
      *use_stmt = NULL_TREE;
      return NULL_TREE;
    }

  /* Make an appropriate copy.  */
  var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  splay_tree_insert (id->decl_map,
                     (splay_tree_key) result,
                     (splay_tree_value) var);

  /* Build the USE_STMT.  */
  *use_stmt = build_min_nt (EXPR_STMT, var);

  /* Build the declaration statement.  */
  return build_min_nt (DECL_STMT, var);
}

/* Returns non-zero if FN is a function that can be inlined.  */

static int
inlinable_function_p (fn, id)
     tree fn;
     inline_data *id;
{
  int inlinable;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return 0;

  /* Assume it is not inlinable.  */
  inlinable = 0;

  /* If we're not inlining things, then nothing is inlinable.  */
  if (!flag_inline_trees)
    ;
  /* If the function was not declared `inline', then we don't inline
     it.  */
  else if (!DECL_INLINE (fn))
    ;
  /* If we don't have the function body available, we can't inline
     it.  */
  else if (!DECL_SAVED_TREE (fn))
    ;
  /* We can't inline varargs functions.  */
  else if (varargs_function_p (fn))
    ;
  /* All is well.  We can inline this function.  Traditionally, GCC
     has refused to inline functions using setjmp or alloca, or
     functions whose values are returned in a PARALLEL, and a few
     other such obscure conditions.  We are not equally constrained at
     the tree level.  */
  else
    inlinable = 1;

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  /* Don't do recursive inlining, either.  We don't record this in
     DECL_UNINLINABLE; we may be able to inline this function later.  */
  if (inlinable)
    {
      size_t i;

      for (i = 0; i < id->fns->elements_used; ++i)
        if (VARRAY_TREE (id->fns, i) == fn)
          inlinable = 0;
    }

  /* We can inline a template instantiation only if it's fully
     instantiated.  */
  if (inlinable
      && DECL_TEMPLATE_INFO (fn)
      && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
    {
      fn = instantiate_decl (fn);
      inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
    }

  /* Return the result.  */
  return inlinable;
}

/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static tree
expand_call_inline (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data *id;
  tree t;
  tree expr;
  tree chain;
  tree fn;
  tree scope_stmt;
  tree use_stmt;
  splay_tree st;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Keep track of the last SCOPE_STMT we've seen.  */
  if (TREE_CODE (t) == SCOPE_STMT)
    {
      if (SCOPE_STMT_BLOCK (t) && !id->in_target_cleanup_p)
        id->scope_stmt = t;
      return NULL_TREE;
    }

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
      int i;

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
         walk_tree, omitting the case where the TARGET_EXPR
         itself is handled.  */
      for (i = first_rtl_op (TARGET_EXPR) - 1; i >= 0; --i)
        {
          if (i == 2)
            ++id->in_target_cleanup_p;
          walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data);
          if (i == 2)
            --id->in_target_cleanup_p;
        }

      return NULL_TREE;
    }

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return NULL_TREE;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    return NULL_TREE;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!inlinable_function_p (fn, id))
    return NULL_TREE;

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);

  /* Build a statement-expression containing code to initialize the
     arguments, the actual inline expansion of the body, and a label
     for the return statements within the function to jump to.  The
     type of the statement expression is the return type of the
     function call.  */
  expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  STMT_EXPR_STMT (expr)
    = initialize_inlined_parameters (id, TREE_OPERAND (t, 1));

  /* Create a block to put the parameters in.  We have to do this
     after the parameters have been remapped because remapping
     parameters is different from remapping ordinary variables.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_BEGIN_P (scope_stmt) = 1;
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
  TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
  STMT_EXPR_STMT (expr) = scope_stmt;
  id->scope_stmt = scope_stmt;

  /* Declare the return variable for the function.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
               declare_return_variable (id, &use_stmt));

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), copy_body (id));

  /* Close the block for the parameters.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  my_friendly_assert (DECL_INITIAL (fn)
                      && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
                      19991203);
  remap_block (scope_stmt, NULL_TREE, id);
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), scope_stmt);

  /* Finally, mention the returned value so that the value of the
     statement-expression is the returned value of the function.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* After the body of the function comes the RET_LABEL.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
               build_min_nt (LABEL_STMT, id->ret_label));

  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (expr) = 1;

  /* Replace the call by the inlined body.  */
  chain = TREE_CHAIN (*tp);
  *tp = expr;
  TREE_CHAIN (expr) = chain;

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (tp, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  /* Keep iterating.  */
  return NULL_TREE;
}

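/* Editorial illustration (not part of the original source): putting
   the pieces above together, a call like `n = add1 (i)', where

     inline int add1 (int x) { return x + 1; }

   is replaced by a statement-expression roughly equivalent to the
   GNU C extension

     n = ({ int x.1;              -- initialize_inlined_parameters
            x.1 = i;
            int retval;           -- declare_return_variable
            retval = x.1 + 1;     -- copied body; each RETURN_STMT
            goto ret_label;       -- now jumps to RET_LABEL
            retval;               -- USE_STMT: value of the expression
           ret_label:; });        -- LABEL_STMT (see the To Do note
                                  -- above about its placement)

   Here `x.1', `retval' and `ret_label' are stand-ins for the
   compiler-generated copies; the real expansion also wraps the
   parameters in a SCOPE_STMT/BLOCK pair so debugging information can
   be emitted for them.  */
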
/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
   expansions as appropriate.  */

static void
expand_calls_inline (tp, id)
     tree *tp;
     inline_data *id;
{
  /* Search through *TP, replacing all calls to inline functions by
     appropriate equivalents.  */
  walk_tree (tp, expand_call_inline, id);
}

/* Optimize the body of FN.  */

void
optimize_function (fn)
     tree fn;
{
  /* Expand calls to inline functions.  */
  if (flag_inline_trees)
    {
      inline_data id;
      tree prev_fn;
      struct saved_scope *s;

      /* Clear out ID.  */
      bzero (&id, sizeof (id));

      /* Don't allow recursion into FN.  */
      VARRAY_TREE_INIT (id.fns, 32, "fns");
      VARRAY_PUSH_TREE (id.fns, fn);
      /* Or any functions that aren't finished yet.  */
      prev_fn = NULL_TREE;
      if (current_function_decl)
        {
          VARRAY_PUSH_TREE (id.fns, current_function_decl);
          prev_fn = current_function_decl;
        }
      for (s = scope_chain; s; s = s->prev)
        if (s->function_decl && s->function_decl != prev_fn)
          {
            VARRAY_PUSH_TREE (id.fns, s->function_decl);
            prev_fn = s->function_decl;
          }
      /* Replace all calls to inline functions with the bodies of those
         functions.  */
      expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

      /* Clean up.  */
      VARRAY_FREE (id.fns);
    }
}