function.c (retrofit_block): Abort if we don't find a suitable insn.
[gcc.git] / gcc / cp / optimize.c
1 /* Perform optimizations on tree structure.
2
3 Copyright (C) 1998, 1999 Free Software Foundation, Inc.
   Written by Mark Mitchell (mark@codesourcery.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "rtl.h"
28 #include "insn-config.h"
29 #include "integrate.h"
30 #include "varray.h"
31
32 /* To Do:
33
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
39 are not needed.
40
41 o Provide heuristics to clamp inlining of recursive template
42 calls?
43
44 o It looks like the return label is not being placed in the optimal
45 place. Shouldn't it come before the returned value? */
46
/* Data required for function inlining.  A single instance of this
   structure is threaded through the tree walks as the DATA argument;
   it carries all the per-inlining state.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain, `h', followed
     by `g', followed by `f'.  The bottom entry (index 0) is always
     the function into which everything is ultimately inlined.  */
  varray_type fns;
  /* The last SCOPE_STMT we have encountered for which a BLOCK was
     grafted into the block-tree; used as the insertion point for the
     next remapped BLOCK.  */
  tree scope_stmt;
  /* The label to jump to when a return statement is encountered.  */
  tree ret_label;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.
     Also maps old BLOCKs to their remapped copies.  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  */
  int in_target_cleanup_p;
} inline_data;
67
68 /* Prototypes. */
69
70 static tree initialize_inlined_parameters PROTO((inline_data *, tree, tree));
71 static tree declare_return_variable PROTO((inline_data *, tree *));
72 static tree copy_body_r PROTO((tree *, int *, void *));
73 static tree copy_body PROTO((inline_data *));
74 static tree expand_call_inline PROTO((tree *, int *, void *));
75 static void expand_calls_inline PROTO((tree *, inline_data *));
76 static int inlinable_function_p PROTO((tree, inline_data *));
77 static tree remap_decl PROTO((tree, inline_data *));
78 static void remap_block PROTO((tree, tree, inline_data *));
79 static void copy_scope_stmt PROTO((tree *, int *, inline_data *));
80 static tree calls_setjmp_r PROTO((tree *, int *, void *));
81
/* Remap DECL during the copying of the BLOCK tree for the function.
   DATA is really an `inline_data *'.  Returns the copy of DECL to use
   in the inlined body, or NULL_TREE if DECL is not a nonstatic local
   entity of the function currently being inlined (in which case the
   caller must leave it alone).  */

static tree
remap_decl (decl, id)
     tree decl;
     inline_data *id;
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = VARRAY_TOP_TREE (id->fns);
  if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
    return NULL_TREE;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      tree t;

      /* Make a copy of the variable or label.  VARRAY_TREE (id->fns, 0)
	 is the outermost function, i.e., the one into which the body
	 is ultimately being inlined.  */
      t = copy_decl_for_inlining (decl, fn,
				  VARRAY_TREE (id->fns, 0));
      /* Remember it, so that if we encounter this local entity
	 again we can reuse this copy.  */
      n = splay_tree_insert (id->decl_map,
			     (splay_tree_key) decl,
			     (splay_tree_value) t);
    }

  return (tree) n->value;
}
118
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
   remapped versions of the variables therein.  And hook the new block
   into the block-tree.  If non-NULL, the DECLS are declarations to
   add to use instead of the BLOCK_VARS in the old block.  For a
   SCOPE_END statement, simply substitutes the BLOCK previously
   remapped for the matching SCOPE_BEGIN.  */

static void
remap_block (scope_stmt, decls, id)
     tree scope_stmt;
     tree decls;
     inline_data *id;
{
  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
     not know whether or not expand_expr will actually write out the
     code we put there.  If it does not, then we'll have more BLOCKs
     than block-notes, and things will go awry.  At some point, we
     should make the back-end handle BLOCK notes in a tidier way,
     without requiring a strict correspondence to the block-tree; then
     this check can go.  */
  if (id->in_target_cleanup_p)
    {
      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
      return;
    }

  /* If this is the beginning of a scope, remap the associated BLOCK.  */
  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      tree old_block;
      tree new_block;
      tree old_var;

      /* Make the new block.  It records the old block as its
	 abstract origin, for the benefit of the debugging back-ends.  */
      old_block = SCOPE_STMT_BLOCK (scope_stmt);
      new_block = make_node (BLOCK);
      TREE_USED (new_block) = TREE_USED (old_block);
      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
      SCOPE_STMT_BLOCK (scope_stmt) = new_block;

      /* Remap its variables.  */
      for (old_var = decls ? decls : BLOCK_VARS (old_block);
	   old_var;
	   old_var = TREE_CHAIN (old_var))
	{
	  tree new_var;

	  /* Remap the variable.  */
	  new_var = remap_decl (old_var, id);
	  if (!new_var)
	    /* We didn't remap this variable, so we can't mess with
	       its TREE_CHAIN.  */
	    ;
	  else
	    {
	      TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
	      BLOCK_VARS (new_block) = new_var;
	    }
	}
      /* We put the BLOCK_VARS in reverse order; fix that now.  */
      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
      /* Graft the new block into the tree, after the BLOCK of the
	 most recent scope statement we have seen.  */
      insert_block_after_note (new_block,
			       SCOPE_STMT_BLOCK (id->scope_stmt),
			       SCOPE_BEGIN_P (id->scope_stmt));
      /* Remember that this is now the last scope statement with
	 an associated block.  */
      id->scope_stmt = scope_stmt;
      /* Remember the remapped block, so the matching SCOPE_END can
	 find it below.  */
      splay_tree_insert (id->decl_map,
			 (splay_tree_key) old_block,
			 (splay_tree_value) new_block);
    }
  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
     remapped block.  */
  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      splay_tree_node n;

      /* Find this block in the table of remapped things; the matching
	 SCOPE_BEGIN must already have been processed.  */
      n = splay_tree_lookup (id->decl_map,
			     (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
      my_friendly_assert (n != NULL, 19991203);
      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;

      /* Remember that this is now the last scope statement with an
	 associated block.  */
      id->scope_stmt = scope_stmt;
    }
}
207
/* Copy the SCOPE_STMT pointed to by TP, preserving its associated
   BLOCK (which copy_tree_r does not copy) and then remapping that
   BLOCK via remap_block.  WALK_SUBTREES is as for walk_tree
   callbacks.  */

static void
copy_scope_stmt (tp, walk_subtrees, id)
     tree *tp;
     int *walk_subtrees;
     inline_data *id;
{
  tree block;

  /* Remember whether or not this statement was nullified.  When
     making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
     doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
     deal with copying BLOCKs if they do not wish to do so.  */
  block = SCOPE_STMT_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  /* Restore the SCOPE_STMT_BLOCK.  */
  SCOPE_STMT_BLOCK (*tp) = block;

  /* Remap the associated block.  */
  remap_block (*tp, NULL_TREE, id);
}
231
/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  Copies the node at *TP in place: RETURN_STMTs
   become assignments (if they return a value) followed by GOTO_STMTs
   to ID->ret_label; local declarations are replaced by their
   remapped equivalents; everything else is duplicated with
   copy_tree_r.  Always returns NULL_TREE so the walk continues.  */

static tree
copy_body_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data* id;
  tree fn;

  /* Set up.  */
  id = (inline_data *) data;
  fn = VARRAY_TOP_TREE (id->fns);

  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
			19991113);

  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
  if (TREE_CODE (*tp) == RETURN_STMT)
    {
      tree return_stmt = *tp;
      tree goto_stmt;

      /* Build the GOTO_STMT.  It inherits the chain of the statement
	 it replaces.  */
      goto_stmt = build_min_nt (GOTO_STMT, id->ret_label);
      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original
	 RESULT_DECL.  */
      if (RETURN_EXPR (return_stmt))
	{
	  *tp = build_min_nt (EXPR_STMT,
			      RETURN_EXPR (return_stmt));
	  /* And then jump to the end of the function.  */
	  TREE_CHAIN (*tp) = goto_stmt;
	}
      /* If we're not returning anything just do the jump.  */
      else
	*tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  */
  else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      my_friendly_assert (new_decl != NULL_TREE, 19991203);
      /* Replace this variable with the copy.  */
      *tp = new_decl;
    }
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
		     walk_subtrees);
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    /* UNSAVE_EXPRs are not expected here; abort if we see one.  */
    my_friendly_abort (19991113);
  /* For a SCOPE_STMT, we must copy the associated block so that we
     can write out debugging information for the inlined variables.  */
  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
    copy_scope_stmt (tp, walk_subtrees, id);
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
      /* Similarly, if we're copying a CALL_EXPR, the RTL for the
	 result is no longer valid.  */
      else if (TREE_CODE (*tp) == CALL_EXPR)
	CALL_EXPR_RTL (*tp) = NULL_RTX;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
322
323 /* Make a copy of the body of FN so that it can be inserted inline in
324 another function. */
325
326 static tree
327 copy_body (id)
328 inline_data *id;
329 {
330 tree body;
331
332 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
333 walk_tree (&body, copy_body_r, id);
334
335 return body;
336 }
337
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).
   FN is the function being inlined.  Returns a chain of statements:
   for each parameter, an EXPR_STMT performing the initialization
   followed by a DECL_STMT declaring the replacement VAR_DECL.
   NOTE(review): the loop tests only P while advancing A in lockstep,
   so it assumes ARGS supplies a value for every parameter --
   presumably default arguments have been expanded by this point;
   confirm against callers.  */

static tree
initialize_inlined_parameters (id, args, fn)
     inline_data *id;
     tree args;
     tree fn;
{
  tree init_stmts;
  tree parms;
  tree a;
  tree p;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Start with no initializations whatsoever.  */
  init_stmts = NULL_TREE;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
    {
      tree init_stmt;
      tree var;

      /* Make an equivalent VAR_DECL.  */
      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
	 that way, when the PARM_DECL is encountered, it will be
	 automatically replaced by the VAR_DECL.  */
      splay_tree_insert (id->decl_map,
			 (splay_tree_key) p,
			 (splay_tree_value) var);
      /* Initialize this VAR_DECL from the equivalent argument.  If
	 the argument is an object, created via a constructor or copy,
	 this will not result in an extra copy: the TARGET_EXPR
	 representing the argument will be bound to VAR, and the
	 object will be constructed in VAR.  */
      init_stmt = build_min_nt (EXPR_STMT,
				build (INIT_EXPR, TREE_TYPE (p),
				       var, TREE_VALUE (a)));
      /* Declare this new variable.  Note that we do this *after* the
	 initialization because we are going to reverse all the
	 initialization statements below.  */
      TREE_CHAIN (init_stmt) = build_min_nt (DECL_STMT, var);
      /* Add this initialization to the list.  */
      TREE_CHAIN (TREE_CHAIN (init_stmt)) = init_stmts;
      init_stmts = init_stmt;
    }

  /* The initialization statements have been built up in reverse
     order.  Straighten them out now.  */
  return nreverse (init_stmts);
}
394
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled in to contain a use of the declaration to
   indicate the return value of the function.  If the function
   returns nothing (or void), both *USE_STMT and the return value are
   NULL_TREE.  */

static tree
declare_return_variable (id, use_stmt)
     struct inline_data *id;
     tree *use_stmt;
{
  tree fn = VARRAY_TOP_TREE (id->fns);
  tree result = DECL_RESULT (fn);
  tree var;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result)),
			      void_type_node))
    {
      *use_stmt = NULL_TREE;
      return NULL_TREE;
    }

  /* Make an appropriate copy.  */
  var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  splay_tree_insert (id->decl_map,
		     (splay_tree_key) result,
		     (splay_tree_value) var);

  /* Build the USE_STMT.  */
  *use_stmt = build_min_nt (EXPR_STMT, var);

  /* Build the declaration statement.  */
  return build_min_nt (DECL_STMT, var);
}
433
/* Returns non-zero if FN is a function that can be inlined into the
   inline context given by ID.  Caches negative results in
   DECL_UNINLINABLE, but recursion and pending-template checks are
   redone on every call.  */

static int
inlinable_function_p (fn, id)
     tree fn;
     inline_data *id;
{
  int inlinable;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return 0;

  /* Assume it is not inlinable.  */
  inlinable = 0;

  /* If we're not inlining things, then nothing is inlinable.  */
  if (!flag_inline_trees)
    ;
  /* If the function was not declared `inline', then we don't inline
     it.  */
  else if (!DECL_INLINE (fn))
    ;
  /* If we don't have the function body available, we can't inline
     it.  */
  else if (!DECL_SAVED_TREE (fn))
    ;
  /* We can't inline varargs functions.  */
  else if (varargs_function_p (fn))
    ;
  /* All is well.  We can inline this function.  Traditionally, GCC
     has refused to inline functions using setjmp or alloca, or
     functions whose values are returned in a PARALLEL, and a few
     other such obscure conditions.  We are not equally constrained at
     the tree level.  */
  else
    inlinable = 1;

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  /* Don't do recursive inlining, either.  We don't record this in
     DECL_UNINLINABLE; we may be able to inline this function later.  */
  if (inlinable)
    {
      size_t i;

      /* FN is uninlinable here if it is anywhere on the current
	 stack of functions being inlined.  */
      for (i = 0; i < id->fns->elements_used; ++i)
	if (VARRAY_TREE (id->fns, i) == fn)
	  inlinable = 0;
    }

  /* We can inline a template instantiation only if it's fully
     instantiated.  */
  if (inlinable
      && DECL_TEMPLATE_INFO (fn)
      && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
    {
      fn = instantiate_decl (fn);
      inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
    }

  /* Return the result.  */
  return inlinable;
}
500
/* If *TP is a CALL_EXPR to an inlinable function, replace it with its
   inline expansion: a STMT_EXPR that initializes the parameters,
   contains a remapped copy of the callee's body, and ends with a
   label to which the copied return statements jump, followed by a use
   of the return variable.  Called via walk_tree; DATA is really an
   `inline_data *'.  Always returns NULL_TREE so the walk
   continues.  */

static tree
expand_call_inline (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data *id;
  tree t;
  tree expr;
  tree chain;
  tree fn;
  tree scope_stmt;
  tree use_stmt;
  tree arg_inits;
  splay_tree st;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Keep track of the last SCOPE_STMT we've seen.  */
  if (TREE_CODE (t) == SCOPE_STMT)
    {
      if (SCOPE_STMT_BLOCK (t) && !id->in_target_cleanup_p)
	id->scope_stmt = t;
      return NULL_TREE;
    }

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
      int i, len = first_rtl_op (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
	 walk_trees, omitting the case where the TARGET_EXPR
	 itself is handled.  */
      for (i = 0; i < len; ++i)
	{
	  /* Operand 2 is the one walked with in_target_cleanup_p
	     set; presumably it is the cleanup expression -- see the
	     comment above.  */
	  if (i == 2)
	    ++id->in_target_cleanup_p;
	  walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data);
	  if (i == 2)
	    --id->in_target_cleanup_p;
	}

      return NULL_TREE;
    }

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return NULL_TREE;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    return NULL_TREE;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!inlinable_function_p (fn, id))
    return NULL_TREE;

  /* Build a statement-expression containing code to initialize the
     arguments, the actual inline expansion of the body, and a label
     for the return statements within the function to jump to.  The
     type of the statement expression is the return type of the
     function call.  */
  expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);

  /* Local declarations will be replaced by their equivalents in this
     map.  Save the old map so it can be restored once this inline
     expansion is complete.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
				 NULL, NULL);

  /* Initialize the parameters.  */
  arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
  /* Expand any inlined calls in the initializers.  Do this before we
     push FN on the stack of functions we are inlining; we want to
     inline calls to FN that appear in the initializers for the
     parameters.  */
  expand_calls_inline (&arg_inits, id);
  /* And add them to the tree.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);

  /* Create a block to put the parameters in.  We have to do this
     after the parameters have been remapped because remapping
     parameters is different from remapping ordinary variables.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_BEGIN_P (scope_stmt) = 1;
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
  TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
  STMT_EXPR_STMT (expr) = scope_stmt;
  id->scope_stmt = scope_stmt;

  /* Tell the debugging backends that this block represents the
     outermost scope of the inlined function.  FIXME what to do for
     inlines in cleanups?  */
  if (SCOPE_STMT_BLOCK (scope_stmt))
    BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);

  /* Declare the return variable for the function.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
	       declare_return_variable (id, &use_stmt));

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), copy_body (id));

  /* Close the block for the parameters.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  my_friendly_assert (DECL_INITIAL (fn)
		      && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
		      19991203);
  remap_block (scope_stmt, NULL_TREE, id);
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), scope_stmt);

  /* Finally, mention the returned value so that the value of the
     statement-expression is the returned value of the function.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);

  /* Clean up; restore the declaration map of the enclosing inline
     expansion (if any).  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* After the body of the function comes the RET_LABEL.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
	       build_min_nt (LABEL_STMT, id->ret_label));

  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);

  /* Replace the call by the inlined body.  Wrap it in an
     EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
     pointing to the right place.  */
  chain = TREE_CHAIN (*tp);
  *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
			/*col=*/0);
  EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
  TREE_CHAIN (*tp) = chain;

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (tp, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  /* Keep iterating.  */
  return NULL_TREE;
}
679
/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
   expansions as appropriate.  Thin wrapper over walk_tree with the
   expand_call_inline callback; ID carries the inlining state.  */

static void
expand_calls_inline (tp, id)
     tree *tp;
     inline_data *id;
{
  /* Search through *TP, replacing all calls to inline functions by
     appropriate equivalents.  */
  walk_tree (tp, expand_call_inline, id);
}
692
/* Optimize the body of FN.  Currently the only optimization performed
   is the inline expansion of calls in FN's saved tree.  */

void
optimize_function (fn)
     tree fn;
{
  /* Expand calls to inline functions.  */
  if (flag_inline_trees)
    {
      inline_data id;
      tree prev_fn;
      struct saved_scope *s;

      /* Clear out ID.  */
      memset (&id, 0, sizeof (id));

      /* Don't allow recursion into FN.  */
      VARRAY_TREE_INIT (id.fns, 32, "fns");
      VARRAY_PUSH_TREE (id.fns, fn);
      /* Or any functions that aren't finished yet.  */
      prev_fn = NULL_TREE;
      if (current_function_decl)
	{
	  VARRAY_PUSH_TREE (id.fns, current_function_decl);
	  prev_fn = current_function_decl;
	}
      /* Also collect the enclosing, still-open functions from the
	 scope chain; PREV_FN just avoids pushing adjacent
	 duplicates.  */
      for (s = scope_chain; s; s = s->prev)
	if (s->function_decl && s->function_decl != prev_fn)
	  {
	    VARRAY_PUSH_TREE (id.fns, s->function_decl);
	    prev_fn = s->function_decl;
	  }

      /* Initialize id->scope_stmt with a fake SCOPE_STMT for the outermost
	 block of the function (i.e. the BLOCK with __FUNCTION__ et al).  */
      id.scope_stmt = build_min_nt (SCOPE_STMT,
				    BLOCK_SUBBLOCKS (DECL_INITIAL (fn)));
      SCOPE_BEGIN_P (id.scope_stmt) = 1;

      /* Replace all calls to inline functions with the bodies of those
	 functions.  */
      expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

      /* Clean up.  */
      VARRAY_FREE (id.fns);
    }
}
740
741 /* Called from calls_setjmp_p via walk_tree. */
742
743 static tree
744 calls_setjmp_r (tp, walk_subtrees, data)
745 tree *tp;
746 int *walk_subtrees ATTRIBUTE_UNUSED;
747 void *data ATTRIBUTE_UNUSED;
748 {
749 int setjmp_p;
750 int longjmp_p;
751 int malloc_p;
752 int alloca_p;
753
754 /* We're only interested in FUNCTION_DECLS. */
755 if (TREE_CODE (*tp) != FUNCTION_DECL)
756 return NULL_TREE;
757
758 special_function_p (*tp, &setjmp_p, &longjmp_p, &malloc_p, &alloca_p);
759
760 return setjmp_p ? *tp : NULL_TREE;
761 }
762
763 /* Returns non-zero if FN calls `setjmp' or some other function that
764 can return more than once. This function is conservative; it may
765 occasionally return a non-zero value even when FN does not actually
766 call `setjmp'. */
767
768 int
769 calls_setjmp_p (fn)
770 tree fn;
771 {
772 return (walk_tree (&DECL_SAVED_TREE (fn), calls_setjmp_r, NULL)
773 != NULL_TREE);
774 }
775