/* Fix 3 ia64 g++ testsuite failures, and groff miscompilation.
   [gcc.git] / gcc / cp / optimize.c  */
1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "rtl.h"
27 #include "insn-config.h"
28 #include "input.h"
29 #include "integrate.h"
30 #include "varray.h"
31
32 /* To Do:
33
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
39 are not needed.
40
41 o Provide heuristics to clamp inlining of recursive template
42 calls? */
43
/* Data required for function inlining.  One of these is allocated
   (on the stack, in optimize_function) for each tree-inlining walk
   and threaded through the walk_tree callbacks as their DATA
   argument.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain, `h', followed
     by `g', followed by `f'.  Entry 0 is always the outermost
     function into which everything is ultimately being inlined.  */
  varray_type fns;
  /* The label to jump to when a return statement is encountered.  The
     RETURN_STMTs of the inlined body are rewritten into jumps to this
     label by copy_body_r.  */
  tree ret_label;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  Also
     maps old BLOCKs to their remapped copies (see remap_block).  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  Used as a nesting counter, not a boolean; see
     expand_call_inline.  */
  int in_target_cleanup_p;
} inline_data;
62
/* Prototypes for the local (static) functions below.  */

static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
static tree declare_return_variable PARAMS ((inline_data *, tree *));
static tree copy_body_r PARAMS ((tree *, int *, void *));
static tree copy_body PARAMS ((inline_data *));
static tree expand_call_inline PARAMS ((tree *, int *, void *));
static void expand_calls_inline PARAMS ((tree *, inline_data *));
static int inlinable_function_p PARAMS ((tree, inline_data *));
static tree remap_decl PARAMS ((tree, inline_data *));
static void remap_block PARAMS ((tree, tree, inline_data *));
static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
76
77 /* Remap DECL during the copying of the BLOCK tree for the function.
78 DATA is really an `inline_data *'. */
79
80 static tree
81 remap_decl (decl, id)
82 tree decl;
83 inline_data *id;
84 {
85 splay_tree_node n;
86 tree fn;
87
88 /* We only remap local variables in the current function. */
89 fn = VARRAY_TOP_TREE (id->fns);
90 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
91 return NULL_TREE;
92
93 /* See if we have remapped this declaration. */
94 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
95 /* If we didn't already have an equivalent for this declaration,
96 create one now. */
97 if (!n)
98 {
99 tree t;
100
101 /* Make a copy of the variable or label. */
102 t = copy_decl_for_inlining (decl, fn,
103 VARRAY_TREE (id->fns, 0));
104
105 /* The decl T could be a dynamic array or other variable size type,
106 in which case some fields need to be remapped because they may
107 contain SAVE_EXPRs. */
108 walk_tree (&DECL_SIZE (t), copy_body_r, id);
109 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id);
110 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
111 && TYPE_DOMAIN (TREE_TYPE (t)))
112 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
113 copy_body_r, id);
114
115 /* Remember it, so that if we encounter this local entity
116 again we can reuse this copy. */
117 n = splay_tree_insert (id->decl_map,
118 (splay_tree_key) decl,
119 (splay_tree_value) t);
120 }
121
122 return (tree) n->value;
123 }
124
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
   remapped versions of the variables therein.  And hook the new block
   into the block-tree.  If non-NULL, the DECLS are declarations to
   add to use instead of the BLOCK_VARS in the old block.

   For a scope-begin statement a new BLOCK is built and registered in
   ID->decl_map; for the matching scope-end statement the previously
   registered BLOCK is looked up and installed, so begin/end pairs
   share one BLOCK.  */

static void
remap_block (scope_stmt, decls, id)
     tree scope_stmt;
     tree decls;
     inline_data *id;
{
  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
     not know whether or not expand_expr will actually write out the
     code we put there.  If it does not, then we'll have more BLOCKs
     than block-notes, and things will go awry.  At some point, we
     should make the back-end handle BLOCK notes in a tidier way,
     without requiring a strict correspondence to the block-tree; then
     this check can go.  */
  if (id->in_target_cleanup_p)
    {
      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
      return;
    }

  /* If this is the beginning of a scope, remap the associated BLOCK.  */
  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      tree old_block;
      tree new_block;
      tree old_var;
      tree fn;

      /* Make the new block.  */
      old_block = SCOPE_STMT_BLOCK (scope_stmt);
      new_block = make_node (BLOCK);
      TREE_USED (new_block) = TREE_USED (old_block);
      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
      SCOPE_STMT_BLOCK (scope_stmt) = new_block;

      /* Remap its variables.  DECLS, when given, overrides the old
	 block's variable list (used for function parameters).  */
      for (old_var = decls ? decls : BLOCK_VARS (old_block);
	   old_var;
	   old_var = TREE_CHAIN (old_var))
	{
	  tree new_var;

	  /* Remap the variable.  */
	  new_var = remap_decl (old_var, id);
	  if (!new_var)
	    /* We didn't remap this variable, so we can't mess with
	       its TREE_CHAIN.  */
	    ;
	  else
	    {
	      /* Prepend the copy to the new block's variable list.  */
	      TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
	      BLOCK_VARS (new_block) = new_var;
	    }
	}
      /* We put the BLOCK_VARS in reverse order; fix that now.  */
      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
      /* Attach this new block after the DECL_INITIAL block for the
	 function into which this block is being inlined.  In
	 rest_of_compilation we will straighten out the BLOCK tree.  */
      fn = VARRAY_TREE (id->fns, 0);
      BLOCK_CHAIN (new_block) = BLOCK_CHAIN (DECL_INITIAL (fn));
      BLOCK_CHAIN (DECL_INITIAL (fn)) = new_block;
      /* Remember the remapped block so the matching scope-end
	 statement can find it.  */
      splay_tree_insert (id->decl_map,
			 (splay_tree_key) old_block,
			 (splay_tree_value) new_block);
    }
  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
     remapped block.  */
  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      splay_tree_node n;

      /* Find this block in the table of remapped things; the
	 scope-begin statement must already have registered it.  */
      n = splay_tree_lookup (id->decl_map,
			     (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
      my_friendly_assert (n != NULL, 19991203);
      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
    }
}
209
210 /* Copy the SCOPE_STMT pointed to by TP. */
211
212 static void
213 copy_scope_stmt (tp, walk_subtrees, id)
214 tree *tp;
215 int *walk_subtrees;
216 inline_data *id;
217 {
218 tree block;
219
220 /* Remember whether or not this statement was nullified. When
221 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
222 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
223 deal with copying BLOCKs if they do not wish to do so. */
224 block = SCOPE_STMT_BLOCK (*tp);
225 /* Copy (and replace) the statement. */
226 copy_tree_r (tp, walk_subtrees, NULL);
227 /* Restore the SCOPE_STMT_BLOCK. */
228 SCOPE_STMT_BLOCK (*tp) = block;
229
230 /* Remap the associated block. */
231 remap_block (*tp, NULL_TREE, id);
232 }
233
/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  Rewrites *TP as the body is copied: RETURN_STMTs
   become assignments plus jumps to ID->ret_label, local declarations
   are replaced by their remapped equivalents, SAVE_EXPRs are remapped
   into the target function, and all other nodes are duplicated.
   Always returns NULL_TREE, so the walk never terminates early.  */

static tree
copy_body_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data* id;
  tree fn;

  /* Set up.  */
  id = (inline_data *) data;
  fn = VARRAY_TOP_TREE (id->fns);

  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
			19991113);

  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
  if (TREE_CODE (*tp) == RETURN_STMT)
    {
      tree return_stmt = *tp;
      tree goto_stmt;

      /* Build the GOTO_STMT.  */
      goto_stmt = build_min_nt (GOTO_STMT, id->ret_label);
      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original
	 RESULT_DECL.  */
      if (RETURN_EXPR (return_stmt))
	{
	  *tp = build_min_nt (EXPR_STMT,
			      RETURN_EXPR (return_stmt));
	  /* And then jump to the end of the function.  */
	  TREE_CHAIN (*tp) = goto_stmt;
	}
      /* If we're not returning anything just do the jump.  */
      else
	*tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  */
  else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
    {
      tree new_decl;

      /* Remap the declaration.  remap_decl must succeed here since
	 DECL_CONTEXT matches the function being inlined.  */
      new_decl = remap_decl (*tp, id);
      my_friendly_assert (new_decl != NULL_TREE, 19991203);
      /* Replace this variable with the copy.  */
      *tp = new_decl;
    }
  /* A non-static local whose context is neither the inlined function
     nor the outermost function indicates a scoping bug.  */
  else if (nonstatic_local_decl_p (*tp)
	   && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
    my_friendly_abort (0);
  /* SAVE_EXPRs must be remapped so the saved value is computed in the
     function into which the body is being inlined.  */
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
		     walk_subtrees);
  /* UNSAVE_EXPRs are not expected in a saved function body.  */
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    my_friendly_abort (19991113);
  /* For a SCOPE_STMT, we must copy the associated block so that we
     can write out debugging information for the inlined variables.  */
  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
    copy_scope_stmt (tp, walk_subtrees, id);
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  Move the unexpanded
	 initializer (operand 3) back to operand 1.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      /* Similarly, if we're copying a CALL_EXPR, the RTL for the
	 result is no longer valid.  */
      else if (TREE_CODE (*tp) == CALL_EXPR)
	CALL_EXPR_RTL (*tp) = NULL_RTX;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
330
331 /* Make a copy of the body of FN so that it can be inserted inline in
332 another function. */
333
334 static tree
335 copy_body (id)
336 inline_data *id;
337 {
338 tree body;
339
340 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
341 walk_tree (&body, copy_body_r, id);
342
343 return body;
344 }
345
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).
   FN is the function being inlined.  Each PARM_DECL is replaced by a
   fresh VAR_DECL (registered in ID->decl_map) initialized from the
   corresponding argument.  Returns the initialization statements as
   a chain of _STMT nodes.

   NOTE(review): the loop below assumes ARGS is at least as long as
   DECL_ARGUMENTS (fn) -- TREE_VALUE (a) would dereference NULL_TREE
   otherwise.  Presumably the front end guarantees this for calls
   that reach inlining; confirm against the callers.  */

static tree
initialize_inlined_parameters (id, args, fn)
     inline_data *id;
     tree args;
     tree fn;
{
  tree init_stmts;
  tree parms;
  tree a;
  tree p;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Start with no initializations whatsoever.  */
  init_stmts = NULL_TREE;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
    {
      tree init_stmt;
      tree var;

      /* Make an equivalent VAR_DECL.  */
      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
	 that way, when the PARM_DECL is encountered, it will be
	 automatically replaced by the VAR_DECL.  */
      splay_tree_insert (id->decl_map,
			 (splay_tree_key) p,
			 (splay_tree_value) var);
      /* Initialize this VAR_DECL from the equivalent argument.  If
	 the argument is an object, created via a constructor or copy,
	 this will not result in an extra copy: the TARGET_EXPR
	 representing the argument will be bound to VAR, and the
	 object will be constructed in VAR.  */
      init_stmt = build_min_nt (EXPR_STMT,
				build (INIT_EXPR, TREE_TYPE (p),
				       var, TREE_VALUE (a)));
      /* Declare this new variable.  Note that we do this *after* the
	 initialization because we are going to reverse all the
	 initialization statements below.  */
      TREE_CHAIN (init_stmt) = build_min_nt (DECL_STMT, var);
      /* Add this initialization to the list (prepended; fixed up
	 by the nreverse below).  */
      TREE_CHAIN (TREE_CHAIN (init_stmt)) = init_stmts;
      init_stmts = init_stmt;
    }

  /* The initialization statements have been built up in reverse
     order.  Straighten them out now.  */
  return nreverse (init_stmts);
}
402
403 /* Declare a return variable to replace the RESULT_DECL for the
404 function we are calling. An appropriate DECL_STMT is returned.
405 The USE_STMT is filled in to contain a use of the declaration to
406 indicate the return value of the function. */
407
408 static tree
409 declare_return_variable (id, use_stmt)
410 struct inline_data *id;
411 tree *use_stmt;
412 {
413 tree fn = VARRAY_TOP_TREE (id->fns);
414 tree result = DECL_RESULT (fn);
415 tree var;
416
417 /* We don't need to do anything for functions that don't return
418 anything. */
419 if (!result || same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result)),
420 void_type_node))
421 {
422 *use_stmt = NULL_TREE;
423 return NULL_TREE;
424 }
425
426 /* Make an appropriate copy. */
427 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
428 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
429 way, when the RESULT_DECL is encountered, it will be
430 automatically replaced by the VAR_DECL. */
431 splay_tree_insert (id->decl_map,
432 (splay_tree_key) result,
433 (splay_tree_value) var);
434
435 /* Build the USE_STMT. */
436 *use_stmt = build_min_nt (EXPR_STMT, var);
437
438 /* Build the declaration statement. */
439 return build_min_nt (DECL_STMT, var);
440 }
441
/* Returns non-zero if FN is a function that can be inlined into the
   inline context given by ID.  The permanent verdict is cached in
   DECL_UNINLINABLE; the recursive-inlining check is deliberately NOT
   cached, since it depends on the current inlining stack.  */

static int
inlinable_function_p (fn, id)
     tree fn;
     inline_data *id;
{
  int inlinable;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return 0;

  /* Assume it is not inlinable.  */
  inlinable = 0;

  /* If we're not inlining things, then nothing is inlinable.  */
  if (!flag_inline_trees)
    ;
  /* If the function was not declared `inline', then we don't inline
     it.  */
  else if (!DECL_INLINE (fn))
    ;
  /* If we don't have the function body available, we can't inline
     it.  */
  else if (!DECL_SAVED_TREE (fn))
    ;
  /* We can't inline varargs functions.  */
  else if (varargs_function_p (fn))
    ;
  /* All is well.  We can inline this function.  Traditionally, GCC
     has refused to inline functions using setjmp or alloca, or
     functions whose values are returned in a PARALLEL, and a few
     other such obscure conditions.  We are not equally constrained at
     the tree level.  */
  else
    inlinable = 1;

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  /* Don't do recursive inlining, either.  We don't record this in
     DECL_UNINLINABLE; we may be able to inline this function later.
     FN is recursive here if it already appears anywhere on the stack
     of functions we are currently inlining.  */
  if (inlinable)
    {
      size_t i;

      for (i = 0; i < id->fns->elements_used; ++i)
	if (VARRAY_TREE (id->fns, i) == fn)
	  inlinable = 0;
    }

  /* We can inline a template instantiation only if it's fully
     instantiated.  Try instantiating it now; if that still leaves it
     pending, give up on inlining it this time around.  */
  if (inlinable
      && DECL_TEMPLATE_INFO (fn)
      && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
    {
      fn = instantiate_decl (fn);
      inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
    }

  /* Return the result.  */
  return inlinable;
}
508
/* If *TP is a CALL_EXPR to an inlinable function, replace it with its
   inline expansion: a STMT_EXPR (wrapped in an
   EXPR_WITH_FILE_LOCATION) containing parameter initializations, a
   remapped copy of the callee's body, a return label, and a use of
   the return variable.  Called via walk_tree; DATA is really an
   `inline_data *'.  */

static tree
expand_call_inline (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data *id;
  tree t;
  tree expr;
  tree chain;
  tree fn;
  tree scope_stmt;
  tree use_stmt;
  tree arg_inits;
  splay_tree st;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
      /* first_rtl_op gives the number of tree operands to walk for
	 this code.  */
      int i, len = first_rtl_op (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
	 walk_trees, omitting the case where the TARGET_EXPR
	 itself is handled.  Operand 2 is the cleanup, so the
	 in_target_cleanup_p counter is bumped around it.  */
      for (i = 0; i < len; ++i)
	{
	  if (i == 2)
	    ++id->in_target_cleanup_p;
	  walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data);
	  if (i == 2)
	    --id->in_target_cleanup_p;
	}

      return NULL_TREE;
    }

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return NULL_TREE;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    return NULL_TREE;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!inlinable_function_p (fn, id))
    return NULL_TREE;

  /* Set the current filename and line number to the function we are
     inlining so that when we create new _STMT nodes here they get
     line numbers corresponding to the function we are calling.  We
     wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
     because individual statements don't record the filename.  */
  push_srcloc (fn->decl.filename, fn->decl.linenum);

  /* Build a statement-expression containing code to initialize the
     arguments, the actual inline expansion of the body, and a label
     for the return statements within the function to jump to.  The
     type of the statement expression is the return type of the
     function call.  */
  expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);

  /* Local declarations will be replaced by their equivalents in this
     map.  Save the caller's map and start a fresh one for this
     inlining; it is restored below.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
				 NULL, NULL);

  /* Initialize the parameters.  */
  arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
  /* Expand any inlined calls in the initializers.  Do this before we
     push FN on the stack of functions we are inlining; we want to
     inline calls to FN that appear in the initializers for the
     parameters.  */
  expand_calls_inline (&arg_inits, id);
  /* And add them to the tree.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);

  /* Create a block to put the parameters in.  We have to do this
     after the parameters have been remapped because remapping
     parameters is different from remapping ordinary variables.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_BEGIN_P (scope_stmt) = 1;
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
  TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
  STMT_EXPR_STMT (expr) = scope_stmt;

  /* Tell the debugging backends that this block represents the
     outermost scope of the inlined function.  */
  if (SCOPE_STMT_BLOCK (scope_stmt))
    BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);

  /* Declare the return variable for the function.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
	       declare_return_variable (id, &use_stmt));

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), copy_body (id));

  /* Close the block for the parameters.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  my_friendly_assert (DECL_INITIAL (fn)
		      && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
		      19991203);
  remap_block (scope_stmt, NULL_TREE, id);
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), scope_stmt);

  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evalulation
     may cause RTL to be generated.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
	       build_min_nt (LABEL_STMT, id->ret_label));

  /* Finally, mention the returned value so that the value of the
     statement-expression is the returned value of the function.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);

  /* Clean up: discard this inlining's decl map and restore the
     caller's.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);

  /* Replace the call by the inlined body.  Wrap it in an
     EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
     pointing to the right place.  */
  chain = TREE_CHAIN (*tp);
  *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
			/*col=*/0);
  EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
  TREE_CHAIN (*tp) = chain;
  pop_srcloc ();

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (tp, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  /* Keep iterating.  */
  return NULL_TREE;
}
687
/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
   expansions as appropriate.  Thin wrapper over walk_tree with the
   expand_call_inline callback; ID carries the inlining state.  */

static void
expand_calls_inline (tp, id)
     tree *tp;
     inline_data *id;
{
  /* Search through *TP, replacing all calls to inline functions by
     appropriate equivalents.  */
  walk_tree (tp, expand_call_inline, id);
}
700
701 /* Optimize the body of FN. */
702
703 void
704 optimize_function (fn)
705 tree fn;
706 {
707 /* Expand calls to inline functions. */
708 if (flag_inline_trees)
709 {
710 inline_data id;
711 tree prev_fn;
712 struct saved_scope *s;
713
714 /* Clear out ID. */
715 memset (&id, 0, sizeof (id));
716
717 /* Don't allow recursion into FN. */
718 VARRAY_TREE_INIT (id.fns, 32, "fns");
719 VARRAY_PUSH_TREE (id.fns, fn);
720 /* Or any functions that aren't finished yet. */
721 prev_fn = NULL_TREE;
722 if (current_function_decl)
723 {
724 VARRAY_PUSH_TREE (id.fns, current_function_decl);
725 prev_fn = current_function_decl;
726 }
727 for (s = scope_chain; s; s = s->prev)
728 if (s->function_decl && s->function_decl != prev_fn)
729 {
730 VARRAY_PUSH_TREE (id.fns, s->function_decl);
731 prev_fn = s->function_decl;
732 }
733
734 /* Replace all calls to inline functions with the bodies of those
735 functions. */
736 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
737
738 /* Clean up. */
739 VARRAY_FREE (id.fns);
740 }
741 }
742
743 /* Called from calls_setjmp_p via walk_tree. */
744
745 static tree
746 calls_setjmp_r (tp, walk_subtrees, data)
747 tree *tp;
748 int *walk_subtrees ATTRIBUTE_UNUSED;
749 void *data ATTRIBUTE_UNUSED;
750 {
751 int setjmp_p;
752 int longjmp_p;
753 int fork_or_exec_p;
754 int malloc_p;
755 int alloca_p;
756
757 /* We're only interested in FUNCTION_DECLS. */
758 if (TREE_CODE (*tp) != FUNCTION_DECL)
759 return NULL_TREE;
760
761 special_function_p (*tp, &setjmp_p, &longjmp_p, &fork_or_exec_p, &malloc_p,
762 &alloca_p);
763
764 return setjmp_p ? *tp : NULL_TREE;
765 }
766
767 /* Returns non-zero if FN calls `setjmp' or some other function that
768 can return more than once. This function is conservative; it may
769 occasionally return a non-zero value even when FN does not actually
770 call `setjmp'. */
771
772 int
773 calls_setjmp_p (fn)
774 tree fn;
775 {
776 return (walk_tree (&DECL_SAVED_TREE (fn), calls_setjmp_r, NULL)
777 != NULL_TREE);
778 }
779