/* Perform optimizations on tree structure.
   Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
   Written by Mark Mitchell (mark@codesourcery.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to the Free
   Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "tree.h"
#include "cp-tree.h"
#include "rtl.h"
#include "insn-config.h"
#include "input.h"
#include "integrate.h"
#include "varray.h"

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Data required for function inlining.  */

typedef struct inline_data
{
  /* A stack of the functions we are inlining.  For example, if we are
     compiling `f', which calls `g', which calls `h', and we are
     inlining the body of `h', the stack will contain `h', followed
     by `g', followed by `f'.  */
  varray_type fns;
  /* The label to jump to when a return statement is encountered.  If
     this value is NULL, then return statements will simply be
     remapped as return statements, rather than as jumps.  */
  tree ret_label;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* Nonzero if we are currently within the cleanup for a
     TARGET_EXPR.  */
  int in_target_cleanup_p;
} inline_data;

/* Prototypes.  */

static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
static tree declare_return_variable PARAMS ((inline_data *, tree *));
static tree copy_body_r PARAMS ((tree *, int *, void *));
static tree copy_body PARAMS ((inline_data *));
static tree expand_call_inline PARAMS ((tree *, int *, void *));
static void expand_calls_inline PARAMS ((tree *, inline_data *));
static int inlinable_function_p PARAMS ((tree, inline_data *));
static tree remap_decl PARAMS ((tree, inline_data *));
static void remap_block PARAMS ((tree, tree, inline_data *));
static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
static tree calls_setjmp_r PARAMS ((tree *, int *, void *));

/* Remap DECL during the copying of the BLOCK tree for the function.
   DATA is really an `inline_data *'.  */

static tree
remap_decl (decl, id)
     tree decl;
     inline_data *id;
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = VARRAY_TOP_TREE (id->fns);
  if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
    return NULL_TREE;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      tree t;

      /* Make a copy of the variable or label.  */
      t = copy_decl_for_inlining (decl, fn,
                                  VARRAY_TREE (id->fns, 0));

      /* The decl T could be a dynamic array or other variable size type,
         in which case some fields need to be remapped because they may
         contain SAVE_EXPRs.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id);
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
          && TYPE_DOMAIN (TREE_TYPE (t)))
        {
          TREE_TYPE (t) = copy_node (TREE_TYPE (t));
          TYPE_DOMAIN (TREE_TYPE (t))
            = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
          walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
                     copy_body_r, id);
        }

      /* Remember it, so that if we encounter this local entity
         again we can reuse this copy.  */
      n = splay_tree_insert (id->decl_map,
                             (splay_tree_key) decl,
                             (splay_tree_value) t);
    }

  return (tree) n->value;
}

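/* Illustrative example (an editorial sketch, not part of the original
   sources): the SAVE_EXPR walking above matters for variable-size
   objects.  Given something like

       void g (int n) { int vla[n]; ... }

   the DECL_SIZE of `vla' and the TYPE_MAX_VALUE of its array domain
   contain SAVE_EXPRs referring to `n'; walking them with copy_body_r
   makes those references point at the inlined copy of `n' rather than
   at the original parameter.  */
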
/* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
   remapped versions of the variables therein, and hook the new block
   into the block-tree.  If non-NULL, the DECLS are declarations to
   use instead of the BLOCK_VARS in the old block.  */

static void
remap_block (scope_stmt, decls, id)
     tree scope_stmt;
     tree decls;
     inline_data *id;
{
  /* We cannot do this in the cleanup for a TARGET_EXPR since we do
     not know whether or not expand_expr will actually write out the
     code we put there.  If it does not, then we'll have more BLOCKs
     than block-notes, and things will go awry.  At some point, we
     should make the back-end handle BLOCK notes in a tidier way,
     without requiring a strict correspondence to the block-tree; then
     this check can go.  */
  if (id->in_target_cleanup_p)
    {
      SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
      return;
    }

  /* If this is the beginning of a scope, remap the associated BLOCK.  */
  if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      tree old_block;
      tree new_block;
      tree old_var;
      tree *first_block;
      tree fn;

      /* Make the new block.  */
      old_block = SCOPE_STMT_BLOCK (scope_stmt);
      new_block = make_node (BLOCK);
      TREE_USED (new_block) = TREE_USED (old_block);
      BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
      SCOPE_STMT_BLOCK (scope_stmt) = new_block;

      /* Remap its variables.  */
      for (old_var = decls ? decls : BLOCK_VARS (old_block);
           old_var;
           old_var = TREE_CHAIN (old_var))
        {
          tree new_var;

          /* Remap the variable.  */
          new_var = remap_decl (old_var, id);
          /* If we didn't remap this variable, we can't mess with its
             TREE_CHAIN.  If we remapped this variable to something
             other than a declaration (say, if we mapped it to a
             constant), then we must similarly omit any mention of it
             here.  */
          if (!new_var || !DECL_P (new_var))
            ;
          else
            {
              TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
              BLOCK_VARS (new_block) = new_var;
            }
        }
      /* We put the BLOCK_VARS in reverse order; fix that now.  */
      BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
      /* Attach this new block after the DECL_INITIAL block for the
         function into which this block is being inlined.  In
         rest_of_compilation we will straighten out the BLOCK tree.  */
      fn = VARRAY_TREE (id->fns, 0);
      if (DECL_INITIAL (fn))
        first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
      else
        first_block = &DECL_INITIAL (fn);
      BLOCK_CHAIN (new_block) = *first_block;
      *first_block = new_block;
      /* Remember the remapped block.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) old_block,
                         (splay_tree_value) new_block);
    }
  /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
     remapped block.  */
  else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
    {
      splay_tree_node n;

      /* Find this block in the table of remapped things.  */
      n = splay_tree_lookup (id->decl_map,
                             (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
      my_friendly_assert (n != NULL, 19991203);
      SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
    }
}

/* Copy the SCOPE_STMT pointed to by TP.  */

static void
copy_scope_stmt (tp, walk_subtrees, id)
     tree *tp;
     int *walk_subtrees;
     inline_data *id;
{
  tree block;

  /* Remember whether or not this statement was nullified.  When
     making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
     doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
     deal with copying BLOCKs if they do not wish to do so.  */
  block = SCOPE_STMT_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  /* Restore the SCOPE_STMT_BLOCK.  */
  SCOPE_STMT_BLOCK (*tp) = block;

  /* Remap the associated block.  */
  remap_block (*tp, NULL_TREE, id);
}

/* Called from copy_body via walk_tree.  DATA is really an
   `inline_data *'.  */

static tree
copy_body_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data *id;
  tree fn;

  /* Set up.  */
  id = (inline_data *) data;
  fn = VARRAY_TOP_TREE (id->fns);

  /* All automatic variables should have a DECL_CONTEXT indicating
     what function they come from.  */
  if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
      && DECL_NAMESPACE_SCOPE_P (*tp))
    my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
                        19991113);

  /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
     GOTO_STMT with the RET_LABEL as its target.  */
  if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
    {
      tree return_stmt = *tp;
      tree goto_stmt;

      /* Build the GOTO_STMT.  */
      goto_stmt = build_min_nt (GOTO_STMT, id->ret_label);
      TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original
         RESULT_DECL.  */
      if (RETURN_EXPR (return_stmt))
        {
          *tp = build_min_nt (EXPR_STMT,
                              RETURN_EXPR (return_stmt));
          /* And then jump to the end of the function.  */
          TREE_CHAIN (*tp) = goto_stmt;
        }
      /* If we're not returning anything just do the jump.  */
      else
        *tp = goto_stmt;
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  */
  else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      my_friendly_assert (new_decl != NULL_TREE, 19991203);
      /* Replace this variable with the copy.  */
      *tp = new_decl;
    }
  else if (nonstatic_local_decl_p (*tp)
           && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
    my_friendly_abort (0);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
                     walk_subtrees);
  else if (TREE_CODE (*tp) == UNSAVE_EXPR)
    my_friendly_abort (19991113);
  /* For a SCOPE_STMT, we must copy the associated block so that we
     can write out debugging information for the inlined variables.  */
  else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
    copy_scope_stmt (tp, walk_subtrees, id);
  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      /* Similarly, if we're copying a CALL_EXPR, the RTL for the
         result is no longer valid.  */
      else if (TREE_CODE (*tp) == CALL_EXPR)
        CALL_EXPR_RTL (*tp) = NULL_RTX;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

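/* Illustrative example (an editorial sketch, not part of the original
   sources): at the source level, the RETURN_STMT rewrite performed in
   copy_body_r turns a statement such as

       return x + y;

   in the inlinee into, roughly,

       return_var = x + y;
       goto ret_label;

   where return_var stands for the variable created by
   declare_return_variable to replace the RESULT_DECL, and ret_label
   is the label emitted at the end of the inlined body by
   expand_call_inline.  */
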
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (id)
     inline_data *id;
{
  tree body;

  body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
  walk_tree (&body, copy_body_r, id);

  return body;
}

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static tree
initialize_inlined_parameters (id, args, fn)
     inline_data *id;
     tree args;
     tree fn;
{
  tree init_stmts;
  tree parms;
  tree a;
  tree p;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Start with no initializations whatsoever.  */
  init_stmts = NULL_TREE;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
    {
      tree init_stmt;
      tree var;

      /* Make an equivalent VAR_DECL.  */
      var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
      /* Register the VAR_DECL as the equivalent for the PARM_DECL;
         that way, when the PARM_DECL is encountered, it will be
         automatically replaced by the VAR_DECL.  */
      splay_tree_insert (id->decl_map,
                         (splay_tree_key) p,
                         (splay_tree_value) var);
      /* Initialize this VAR_DECL from the equivalent argument.  If
         the argument is an object, created via a constructor or copy,
         this will not result in an extra copy: the TARGET_EXPR
         representing the argument will be bound to VAR, and the
         object will be constructed in VAR.  */
      init_stmt = build_min_nt (EXPR_STMT,
                                build (INIT_EXPR, TREE_TYPE (p),
                                       var, TREE_VALUE (a)));
      /* Declare this new variable.  Note that we do this *after* the
         initialization because we are going to reverse all the
         initialization statements below.  */
      TREE_CHAIN (init_stmt) = build_min_nt (DECL_STMT, var);
      /* Add this initialization to the list.  */
      TREE_CHAIN (TREE_CHAIN (init_stmt)) = init_stmts;
      init_stmts = init_stmt;
    }

  /* The initialization statements have been built up in reverse
     order.  Straighten them out now.  */
  return nreverse (init_stmts);
}

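/* Illustrative example (an editorial sketch, not part of the original
   sources; the names below are made up): for an inlinee declared as

       inline int f (int x, int y);

   called as `f (a, b + 1)', the loop above produces, in effect,

       int x_copy = a;
       int y_copy = b + 1;

   where x_copy and y_copy are the fresh VAR_DECLs standing in for the
   PARM_DECLs; the decl_map entries make every later use of `x' and
   `y' in the copied body refer to these variables instead.  */
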
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled in to contain a use of the declaration to
   indicate the return value of the function.  */

static tree
declare_return_variable (id, use_stmt)
     struct inline_data *id;
     tree *use_stmt;
{
  tree fn = VARRAY_TOP_TREE (id->fns);
  tree result = DECL_RESULT (fn);
  tree var;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result)),
                              void_type_node))
    {
      *use_stmt = NULL_TREE;
      return NULL_TREE;
    }

  /* Make an appropriate copy.  */
  var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  splay_tree_insert (id->decl_map,
                     (splay_tree_key) result,
                     (splay_tree_value) var);

  /* Build the USE_STMT.  */
  *use_stmt = build_min_nt (EXPR_STMT, var);

  /* Build the declaration statement.  */
  return build_min_nt (DECL_STMT, var);
}

/* Returns non-zero if FN is a function that can be inlined.  */

static int
inlinable_function_p (fn, id)
     tree fn;
     inline_data *id;
{
  int inlinable;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return 0;

  /* Assume it is not inlinable.  */
  inlinable = 0;

  /* If we're not inlining things, then nothing is inlinable.  */
  if (!flag_inline_trees)
    ;
  /* If the function was not declared `inline', then we don't inline
     it.  */
  else if (!DECL_INLINE (fn))
    ;
  /* We can't inline varargs functions.  */
  else if (varargs_function_p (fn))
    ;
  /* All is well.  We can inline this function.  Traditionally, GCC
     has refused to inline functions using setjmp or alloca, or
     functions whose values are returned in a PARALLEL, and a few
     other such obscure conditions.  We are not equally constrained at
     the tree level.  */
  else
    inlinable = 1;

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  /* We can inline a template instantiation only if it's fully
     instantiated.  */
  if (inlinable
      && DECL_TEMPLATE_INFO (fn)
      && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
    {
      fn = instantiate_decl (fn, /*defer_ok=*/0);
      inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
    }

  /* If we don't have the function body available, we can't inline
     it.  */
  if (!DECL_SAVED_TREE (fn))
    inlinable = 0;

  /* Don't do recursive inlining, either.  We don't record this in
     DECL_UNINLINABLE; we may be able to inline this function later.  */
  if (inlinable)
    {
      size_t i;

      for (i = 0; i < id->fns->elements_used; ++i)
        if (VARRAY_TREE (id->fns, i) == fn)
          inlinable = 0;
    }

  /* Return the result.  */
  return inlinable;
}

/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */

static tree
expand_call_inline (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  inline_data *id;
  tree t;
  tree expr;
  tree chain;
  tree fn;
  tree scope_stmt;
  tree use_stmt;
  tree arg_inits;
  splay_tree st;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
      int i, len = first_rtl_op (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
         walk_tree, omitting the case where the TARGET_EXPR
         itself is handled.  */
      for (i = 0; i < len; ++i)
        {
          if (i == 2)
            ++id->in_target_cleanup_p;
          walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data);
          if (i == 2)
            --id->in_target_cleanup_p;
        }

      return NULL_TREE;
    }

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return NULL_TREE;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    return NULL_TREE;

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!inlinable_function_p (fn, id))
    return NULL_TREE;

  /* Set the current filename and line number to the function we are
     inlining so that when we create new _STMT nodes here they get
     line numbers corresponding to the function we are calling.  We
     wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
     because individual statements don't record the filename.  */
  push_srcloc (fn->decl.filename, fn->decl.linenum);

  /* Build a statement-expression containing code to initialize the
     arguments, the actual inline expansion of the body, and a label
     for the return statements within the function to jump to.  The
     type of the statement expression is the return type of the
     function call.  */
  expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
                                 NULL, NULL);

  /* Initialize the parameters.  */
  arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
  /* Expand any inlined calls in the initializers.  Do this before we
     push FN on the stack of functions we are inlining; we want to
     inline calls to FN that appear in the initializers for the
     parameters.  */
  expand_calls_inline (&arg_inits, id);
  /* And add them to the tree.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);

  /* Create a block to put the parameters in.  We have to do this
     after the parameters have been remapped because remapping
     parameters is different from remapping ordinary variables.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_BEGIN_P (scope_stmt) = 1;
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
  TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
  STMT_EXPR_STMT (expr) = scope_stmt;

  /* Tell the debugging backends that this block represents the
     outermost scope of the inlined function.  */
  if (SCOPE_STMT_BLOCK (scope_stmt))
    BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);

  /* Declare the return variable for the function.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
               declare_return_variable (id, &use_stmt));

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), copy_body (id));

  /* Close the block for the parameters.  */
  scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
  SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
  my_friendly_assert (DECL_INITIAL (fn)
                      && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
                      19991203);
  remap_block (scope_stmt, NULL_TREE, id);
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr), scope_stmt);

  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evaluation
     may cause RTL to be generated.  */
  STMT_EXPR_STMT (expr)
    = chainon (STMT_EXPR_STMT (expr),
               build_min_nt (LABEL_STMT, id->ret_label));

  /* Finally, mention the returned value so that the value of the
     statement-expression is the returned value of the function.  */
  STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* The new expression has side-effects if the old one did.  */
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);

  /* Replace the call by the inlined body.  Wrap it in an
     EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
     pointing to the right place.  */
  chain = TREE_CHAIN (*tp);
  *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
                        /*col=*/0);
  EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
  TREE_CHAIN (*tp) = chain;
  pop_srcloc ();

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (tp, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  /* Keep iterating.  */
  return NULL_TREE;
}

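/* Illustrative example (an editorial sketch, not actual compiler
   output; the names below are made up): given

       inline int f (int x) { return x * 2; }

   a call `z = f (a)' is, after expand_call_inline, shaped roughly
   like the GNU statement-expression

       z = ({
             int x_copy = a;       <- initialize_inlined_parameters
             int return_var;       <- declare_return_variable
             return_var = x_copy * 2;   <- copied body, return rewritten
             goto ret_label;
            ret_label:;
             return_var;           <- value of the whole expression
           });

   wrapped in an EXPR_WITH_FILE_LOCATION recording f's source
   position.  */
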
/* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
   expansions as appropriate.  */

static void
expand_calls_inline (tp, id)
     tree *tp;
     inline_data *id;
{
  /* Search through *TP, replacing all calls to inline functions by
     appropriate equivalents.  */
  walk_tree (tp, expand_call_inline, id);
}

/* Optimize the body of FN.  */

void
optimize_function (fn)
     tree fn;
{
  /* Expand calls to inline functions.  */
  if (flag_inline_trees)
    {
      inline_data id;
      tree prev_fn;
      struct saved_scope *s;

      /* Clear out ID.  */
      memset (&id, 0, sizeof (id));

      /* Don't allow recursion into FN.  */
      VARRAY_TREE_INIT (id.fns, 32, "fns");
      VARRAY_PUSH_TREE (id.fns, fn);
      /* Or any functions that aren't finished yet.  */
      prev_fn = NULL_TREE;
      if (current_function_decl)
        {
          VARRAY_PUSH_TREE (id.fns, current_function_decl);
          prev_fn = current_function_decl;
        }
      for (s = scope_chain; s; s = s->prev)
        if (s->function_decl && s->function_decl != prev_fn)
          {
            VARRAY_PUSH_TREE (id.fns, s->function_decl);
            prev_fn = s->function_decl;
          }

      /* Replace all calls to inline functions with the bodies of those
         functions.  */
      expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

      /* Clean up.  */
      VARRAY_FREE (id.fns);
    }
}

/* Called from calls_setjmp_p via walk_tree.  */

static tree
calls_setjmp_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  /* We're only interested in FUNCTION_DECLs.  */
  if (TREE_CODE (*tp) != FUNCTION_DECL)
    return NULL_TREE;

  return setjmp_call_p (*tp) ? *tp : NULL_TREE;
}

/* Returns non-zero if FN calls `setjmp' or some other function that
   can return more than once.  This function is conservative; it may
   occasionally return a non-zero value even when FN does not actually
   call `setjmp'.  */

int
calls_setjmp_p (fn)
     tree fn;
{
  return (walk_tree (&DECL_SAVED_TREE (fn), calls_setjmp_r, NULL)
          != NULL_TREE);
}

/* FN is a function that has a complete body.  Clone the body as
   necessary.  Returns non-zero if there's no longer any need to
   process the main body.  */

int
maybe_clone_body (fn)
     tree fn;
{
  inline_data id;
  tree clone;

  /* We don't clone constructors and destructors under the old ABI.  */
  if (!flag_new_abi)
    return 0;

  /* We only clone constructors and destructors.  */
  if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
      && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
    return 0;

  /* We know that any clones immediately follow FN in the TYPE_METHODS
     list.  */
  for (clone = TREE_CHAIN (fn);
       clone && DECL_CLONED_FUNCTION_P (clone);
       clone = TREE_CHAIN (clone))
    {
      tree parm;
      tree clone_parm;
      int parmno;

      /* Update CLONE's source position information to match FN's.  */
      DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
      DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);

      /* Start processing the function.  */
      push_to_top_level ();
      start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
      store_parm_decls ();

      /* Just clone the body, as if we were making an inline call.
         But remap the parameters in the callee to the parameters of
         the caller.  If there's an in-charge parameter, map it to an
         appropriate constant.  */
      memset (&id, 0, sizeof (id));
      VARRAY_TREE_INIT (id.fns, 2, "fns");
      VARRAY_PUSH_TREE (id.fns, clone);
      VARRAY_PUSH_TREE (id.fns, fn);

      /* Remap the parameters.  */
      id.decl_map = splay_tree_new (splay_tree_compare_pointers,
                                    NULL, NULL);
      for (parmno = 0,
             parm = DECL_ARGUMENTS (fn),
             clone_parm = DECL_ARGUMENTS (clone);
           parm;
           ++parmno,
             parm = TREE_CHAIN (parm))
        {
          /* Map the in-charge parameter to an appropriate constant.  */
          if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
            {
              tree in_charge;
              in_charge = in_charge_arg_for_name (DECL_NAME (clone));
              splay_tree_insert (id.decl_map,
                                 (splay_tree_key) parm,
                                 (splay_tree_value) in_charge);
            }
          /* Map other parameters to their equivalents in the cloned
             function.  */
          else
            {
              splay_tree_insert (id.decl_map,
                                 (splay_tree_key) parm,
                                 (splay_tree_value) clone_parm);
              clone_parm = TREE_CHAIN (clone_parm);
            }
        }

      /* Actually copy the body.  */
      TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);

      /* Clean up.  */
      splay_tree_delete (id.decl_map);
      VARRAY_FREE (id.fns);

      /* Now, expand this function into RTL, if appropriate.  */
      current_function_name_declared = 1;
      expand_body (finish_function (0));
      pop_from_top_level ();
    }

  /* We don't need to process the original function any further.  */
  return 1;
}
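
/* Illustrative example (an editorial sketch, not part of the original
   sources): under the new ABI, for something like

       struct A : virtual B { A (); };

   A::A() is a "maybe in-charge" constructor.  Roughly speaking,
   maybe_clone_body emits its clones (the complete-object and
   base-object constructors) by copying the body once per clone and,
   where a hidden in-charge parameter exists, remapping it to the
   constant returned by in_charge_arg_for_name for that clone, so that
   each clone either does or does not construct the virtual base.  */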