optimize.c (copy_body_r): Use STRIP_TYPE_NOPS when copying variables.
[gcc.git] / gcc / cp / optimize.c
1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "rtl.h"
27 #include "insn-config.h"
28 #include "input.h"
29 #include "integrate.h"
30 #include "varray.h"
31
32 /* To Do:
33
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
39 are not needed.
40
41 o Provide heuristics to clamp inlining of recursive template
42 calls? */
43
44 /* Data required for function inlining. */
45
46 typedef struct inline_data
47 {
48 /* A stack of the functions we are inlining. For example, if we are
49 compiling `f', which calls `g', which calls `h', and we are
50 inlining the body of `h', the stack will contain, `h', followed
51 by `g', followed by `f'. */
52 varray_type fns;
53 /* The label to jump to when a return statement is encountered. If
54 this value is NULL, then return statements will simply be
55 remapped as return statements, rather than as jumps. */
56 tree ret_label;
57 /* The map from local declarations in the inlined function to
58 equivalents in the function into which it is being inlined. */
59 splay_tree decl_map;
60 /* Nonzero if we are currently within the cleanup for a
61 TARGET_EXPR. */
62 int in_target_cleanup_p;
63 } inline_data;
64
65 /* Prototypes. */
66
67 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
68 static tree declare_return_variable PARAMS ((inline_data *, tree *));
69 static tree copy_body_r PARAMS ((tree *, int *, void *));
70 static tree copy_body PARAMS ((inline_data *));
71 static tree expand_call_inline PARAMS ((tree *, int *, void *));
72 static void expand_calls_inline PARAMS ((tree *, inline_data *));
73 static int inlinable_function_p PARAMS ((tree, inline_data *));
74 static tree remap_decl PARAMS ((tree, inline_data *));
75 static void remap_block PARAMS ((tree, tree, inline_data *));
76 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
77 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
78
79 /* Remap DECL during the copying of the BLOCK tree for the function.
80 DATA is really an `inline_data *'. */
81
82 static tree
83 remap_decl (decl, id)
84 tree decl;
85 inline_data *id;
86 {
87 splay_tree_node n;
88 tree fn;
89
90 /* We only remap local variables in the current function. */
91 fn = VARRAY_TOP_TREE (id->fns);
92 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
93 return NULL_TREE;
94
95 /* See if we have remapped this declaration. */
96 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
97 /* If we didn't already have an equivalent for this declaration,
98 create one now. */
99 if (!n)
100 {
101 tree t;
102
103 /* Make a copy of the variable or label. */
104 t = copy_decl_for_inlining (decl, fn,
105 VARRAY_TREE (id->fns, 0));
106
107 /* The decl T could be a dynamic array or other variable size type,
108 in which case some fields need to be remapped because they may
109 contain SAVE_EXPRs. */
110 walk_tree (&DECL_SIZE (t), copy_body_r, id);
111 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id);
112 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
113 && TYPE_DOMAIN (TREE_TYPE (t)))
114 {
115 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
116 TYPE_DOMAIN (TREE_TYPE (t))
117 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
118 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
119 copy_body_r, id);
120 }
121
122 /* Remember it, so that if we encounter this local entity
123 again we can reuse this copy. */
124 n = splay_tree_insert (id->decl_map,
125 (splay_tree_key) decl,
126 (splay_tree_value) t);
127 }
128
129 return (tree) n->value;
130 }
131
132 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
133 remapped versions of the variables therein. And hook the new block
134 into the block-tree. If non-NULL, the DECLS are declarations to
135 add to use instead of the BLOCK_VARS in the old block. */
136
137 static void
138 remap_block (scope_stmt, decls, id)
139 tree scope_stmt;
140 tree decls;
141 inline_data *id;
142 {
143 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
144 not know whether or not expand_expr will actually write out the
145 code we put there. If it does not, then we'll have more BLOCKs
146 than block-notes, and things will go awry. At some point, we
147 should make the back-end handle BLOCK notes in a tidier way,
148 without requiring a strict correspondence to the block-tree; then
149 this check can go. */
150 if (id->in_target_cleanup_p)
151 {
152 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
153 return;
154 }
155
156 /* If this is the beginning of a scope, remap the associated BLOCK. */
157 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
158 {
159 tree old_block;
160 tree new_block;
161 tree old_var;
162 tree *first_block;
163 tree fn;
164
165 /* Make the new block. */
166 old_block = SCOPE_STMT_BLOCK (scope_stmt);
167 new_block = make_node (BLOCK);
168 TREE_USED (new_block) = TREE_USED (old_block);
169 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
170 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
171
172 /* Remap its variables. */
173 for (old_var = decls ? decls : BLOCK_VARS (old_block);
174 old_var;
175 old_var = TREE_CHAIN (old_var))
176 {
177 tree new_var;
178
179 /* Remap the variable. */
180 new_var = remap_decl (old_var, id);
181 /* If we didn't remap this variable, so we can't mess with
182 its TREE_CHAIN. If we remapped this variable to
183 something other than a declaration (say, if we mapped it
184 to a constant), then we must similarly omit any mention
185 of it here. */
186 if (!new_var || !DECL_P (new_var))
187 ;
188 else
189 {
190 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
191 BLOCK_VARS (new_block) = new_var;
192 }
193 }
194 /* We put the BLOCK_VARS in reverse order; fix that now. */
195 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
196 /* Attach this new block after the DECL_INITIAL block for the
197 function into which this block is being inlined. In
198 rest_of_compilation we will straighten out the BLOCK tree. */
199 fn = VARRAY_TREE (id->fns, 0);
200 if (DECL_INITIAL (fn))
201 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
202 else
203 first_block = &DECL_INITIAL (fn);
204 BLOCK_CHAIN (new_block) = *first_block;
205 *first_block = new_block;
206 /* Remember the remapped block. */
207 splay_tree_insert (id->decl_map,
208 (splay_tree_key) old_block,
209 (splay_tree_value) new_block);
210 }
211 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
212 remapped block. */
213 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
214 {
215 splay_tree_node n;
216
217 /* Find this block in the table of remapped things. */
218 n = splay_tree_lookup (id->decl_map,
219 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
220 my_friendly_assert (n != NULL, 19991203);
221 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
222 }
223 }
224
225 /* Copy the SCOPE_STMT pointed to by TP. */
226
227 static void
228 copy_scope_stmt (tp, walk_subtrees, id)
229 tree *tp;
230 int *walk_subtrees;
231 inline_data *id;
232 {
233 tree block;
234
235 /* Remember whether or not this statement was nullified. When
236 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
237 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
238 deal with copying BLOCKs if they do not wish to do so. */
239 block = SCOPE_STMT_BLOCK (*tp);
240 /* Copy (and replace) the statement. */
241 copy_tree_r (tp, walk_subtrees, NULL);
242 /* Restore the SCOPE_STMT_BLOCK. */
243 SCOPE_STMT_BLOCK (*tp) = block;
244
245 /* Remap the associated block. */
246 remap_block (*tp, NULL_TREE, id);
247 }
248
249 /* Called from copy_body via walk_tree. DATA is really an
250 `inline_data *'. */
251
252 static tree
253 copy_body_r (tp, walk_subtrees, data)
254 tree *tp;
255 int *walk_subtrees;
256 void *data;
257 {
258 inline_data* id;
259 tree fn;
260
261 /* Set up. */
262 id = (inline_data *) data;
263 fn = VARRAY_TOP_TREE (id->fns);
264
265 /* All automatic variables should have a DECL_CONTEXT indicating
266 what function they come from. */
267 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
268 && DECL_NAMESPACE_SCOPE_P (*tp))
269 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
270 19991113);
271
272 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
273 GOTO_STMT with the RET_LABEL as its target. */
274 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
275 {
276 tree return_stmt = *tp;
277 tree goto_stmt;
278
279 /* Build the GOTO_STMT. */
280 goto_stmt = build_min_nt (GOTO_STMT, id->ret_label);
281 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
282
283 /* If we're returning something, just turn that into an
284 assignment into the equivalent of the original
285 RESULT_DECL. */
286 if (RETURN_EXPR (return_stmt))
287 {
288 *tp = build_min_nt (EXPR_STMT,
289 RETURN_EXPR (return_stmt));
290 /* And then jump to the end of the function. */
291 TREE_CHAIN (*tp) = goto_stmt;
292 }
293 /* If we're not returning anything just do the jump. */
294 else
295 *tp = goto_stmt;
296 }
297 /* Local variables and labels need to be replaced by equivalent
298 variables. We don't want to copy static variables; there's only
299 one of those, no matter how many times we inline the containing
300 function. */
301 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
302 {
303 tree new_decl;
304
305 /* Remap the declaration. */
306 new_decl = remap_decl (*tp, id);
307 my_friendly_assert (new_decl != NULL_TREE, 19991203);
308 /* Replace this variable with the copy. */
309 STRIP_TYPE_NOPS (new_decl);
310 *tp = new_decl;
311 }
312 else if (nonstatic_local_decl_p (*tp)
313 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
314 my_friendly_abort (0);
315 else if (TREE_CODE (*tp) == SAVE_EXPR)
316 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
317 walk_subtrees);
318 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
319 my_friendly_abort (19991113);
320 /* For a SCOPE_STMT, we must copy the associated block so that we
321 can write out debugging information for the inlined variables. */
322 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
323 copy_scope_stmt (tp, walk_subtrees, id);
324 /* Otherwise, just copy the node. Note that copy_tree_r already
325 knows not to copy VAR_DECLs, etc., so this is safe. */
326 else
327 {
328 copy_tree_r (tp, walk_subtrees, NULL);
329
330 /* The copied TARGET_EXPR has never been expanded, even if the
331 original node was expanded already. */
332 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
333 {
334 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
335 TREE_OPERAND (*tp, 3) = NULL_TREE;
336 }
337 /* Similarly, if we're copying a CALL_EXPR, the RTL for the
338 result is no longer valid. */
339 else if (TREE_CODE (*tp) == CALL_EXPR)
340 CALL_EXPR_RTL (*tp) = NULL_RTX;
341 }
342
343 /* Keep iterating. */
344 return NULL_TREE;
345 }
346
347 /* Make a copy of the body of FN so that it can be inserted inline in
348 another function. */
349
350 static tree
351 copy_body (id)
352 inline_data *id;
353 {
354 tree body;
355
356 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
357 walk_tree (&body, copy_body_r, id);
358
359 return body;
360 }
361
362 /* Generate code to initialize the parameters of the function at the
363 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
364
365 static tree
366 initialize_inlined_parameters (id, args, fn)
367 inline_data *id;
368 tree args;
369 tree fn;
370 {
371 tree init_stmts;
372 tree parms;
373 tree a;
374 tree p;
375
376 /* Figure out what the parameters are. */
377 parms = DECL_ARGUMENTS (fn);
378
379 /* Start with no initializations whatsoever. */
380 init_stmts = NULL_TREE;
381
382 /* Loop through the parameter declarations, replacing each with an
383 equivalent VAR_DECL, appropriately initialized. */
384 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
385 {
386 tree init_stmt;
387 tree var;
388 tree value;
389
390 /* Find the initializer. */
391 value = TREE_VALUE (a);
392 /* If the parameter is never assigned to, we may not need to
393 create a new variable here at all. Instead, we may be able
394 to just use the argument value. */
395 if (TREE_READONLY (p) && !TREE_SIDE_EFFECTS (value))
396 {
397 /* Simplify the value, if possible. */
398 value = fold (decl_constant_value (value));
399
400 /* We can't risk substituting complex expressions. They
401 might contain variables that will be assigned to later.
402 Theoretically, we could check the expression to see if
403 all of the variables that determine its value are
404 read-only, but we don't bother. */
405 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
406 {
407 /* If this is a declaration, wrap it a NOP_EXPR so that
408 we don't try to put the VALUE on the list of
409 BLOCK_VARS. */
410 if (DECL_P (value))
411 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
412
413 splay_tree_insert (id->decl_map,
414 (splay_tree_key) p,
415 (splay_tree_value) value);
416 continue;
417 }
418 }
419
420 /* Make an equivalent VAR_DECL. */
421 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
422 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
423 that way, when the PARM_DECL is encountered, it will be
424 automatically replaced by the VAR_DECL. */
425 splay_tree_insert (id->decl_map,
426 (splay_tree_key) p,
427 (splay_tree_value) var);
428 /* Initialize this VAR_DECL from the equivalent argument. If
429 the argument is an object, created via a constructor or copy,
430 this will not result in an extra copy: the TARGET_EXPR
431 representing the argument will be bound to VAR, and the
432 object will be constructed in VAR. */
433 init_stmt = build_min_nt (EXPR_STMT,
434 build (INIT_EXPR, TREE_TYPE (p),
435 var, value));
436 /* Declare this new variable. Note that we do this *after* the
437 initialization because we are going to reverse all the
438 initialization statements below. */
439 TREE_CHAIN (init_stmt) = build_min_nt (DECL_STMT, var);
440 /* Add this initialization to the list. */
441 TREE_CHAIN (TREE_CHAIN (init_stmt)) = init_stmts;
442 init_stmts = init_stmt;
443 }
444
445 /* The initialization statements have been built up in reverse
446 order. Straighten them out now. */
447 return nreverse (init_stmts);
448 }
449
450 /* Declare a return variable to replace the RESULT_DECL for the
451 function we are calling. An appropriate DECL_STMT is returned.
452 The USE_STMT is filled in to contain a use of the declaration to
453 indicate the return value of the function. */
454
455 static tree
456 declare_return_variable (id, use_stmt)
457 struct inline_data *id;
458 tree *use_stmt;
459 {
460 tree fn = VARRAY_TOP_TREE (id->fns);
461 tree result = DECL_RESULT (fn);
462 tree var;
463
464 /* We don't need to do anything for functions that don't return
465 anything. */
466 if (!result || same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (result)),
467 void_type_node))
468 {
469 *use_stmt = NULL_TREE;
470 return NULL_TREE;
471 }
472
473 /* Make an appropriate copy. */
474 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
475 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
476 way, when the RESULT_DECL is encountered, it will be
477 automatically replaced by the VAR_DECL. */
478 splay_tree_insert (id->decl_map,
479 (splay_tree_key) result,
480 (splay_tree_value) var);
481
482 /* Build the USE_STMT. */
483 *use_stmt = build_min_nt (EXPR_STMT, var);
484
485 /* Build the declaration statement. */
486 return build_min_nt (DECL_STMT, var);
487 }
488
489 /* Returns non-zero if FN is a function that can be inlined. */
490
491 static int
492 inlinable_function_p (fn, id)
493 tree fn;
494 inline_data *id;
495 {
496 int inlinable;
497
498 /* If we've already decided this function shouldn't be inlined,
499 there's no need to check again. */
500 if (DECL_UNINLINABLE (fn))
501 return 0;
502
503 /* Assume it is not inlinable. */
504 inlinable = 0;
505
506 /* If we're not inlining things, then nothing is inlinable. */
507 if (!flag_inline_trees)
508 ;
509 /* If the function was not declared `inline', then we don't inline
510 it. */
511 else if (!DECL_INLINE (fn))
512 ;
513 /* We can't inline varargs functions. */
514 else if (varargs_function_p (fn))
515 ;
516 /* All is well. We can inline this function. Traditionally, GCC
517 has refused to inline functions using alloca, or functions whose
518 values are returned in a PARALLEL, and a few other such obscure
519 conditions. We are not equally constrained at the tree level. */
520 else
521 inlinable = 1;
522
523 /* Squirrel away the result so that we don't have to check again. */
524 DECL_UNINLINABLE (fn) = !inlinable;
525
526 /* We can inline a template instantiation only if it's fully
527 instantiated. */
528 if (inlinable
529 && DECL_TEMPLATE_INFO (fn)
530 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
531 {
532 fn = instantiate_decl (fn, /*defer_ok=*/0);
533 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
534 }
535
536 /* If we don't have the function body available, we can't inline
537 it. */
538 if (!DECL_SAVED_TREE (fn))
539 inlinable = 0;
540
541 /* Don't do recursive inlining, either. We don't record this in
542 DECL_UNLINABLE; we may be able to inline this function later. */
543 if (inlinable)
544 {
545 size_t i;
546
547 for (i = 0; i < id->fns->elements_used; ++i)
548 if (VARRAY_TREE (id->fns, i) == fn)
549 inlinable = 0;
550 }
551
552 /* Return the result. */
553 return inlinable;
554 }
555
556 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
557
558 static tree
559 expand_call_inline (tp, walk_subtrees, data)
560 tree *tp;
561 int *walk_subtrees;
562 void *data;
563 {
564 inline_data *id;
565 tree t;
566 tree expr;
567 tree chain;
568 tree fn;
569 tree scope_stmt;
570 tree use_stmt;
571 tree arg_inits;
572 splay_tree st;
573
574 /* See what we've got. */
575 id = (inline_data *) data;
576 t = *tp;
577
578 /* Recurse, but letting recursive invocations know that we are
579 inside the body of a TARGET_EXPR. */
580 if (TREE_CODE (*tp) == TARGET_EXPR)
581 {
582 int i, len = first_rtl_op (TARGET_EXPR);
583
584 /* We're walking our own subtrees. */
585 *walk_subtrees = 0;
586
587 /* Actually walk over them. This loop is the body of
588 walk_trees, omitting the case where the TARGET_EXPR
589 itself is handled. */
590 for (i = 0; i < len; ++i)
591 {
592 if (i == 2)
593 ++id->in_target_cleanup_p;
594 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data);
595 if (i == 2)
596 --id->in_target_cleanup_p;
597 }
598
599 return NULL_TREE;
600 }
601
602 /* From here on, we're only interested in CALL_EXPRs. */
603 if (TREE_CODE (t) != CALL_EXPR)
604 return NULL_TREE;
605
606 /* First, see if we can figure out what function is being called.
607 If we cannot, then there is no hope of inlining the function. */
608 fn = get_callee_fndecl (t);
609 if (!fn)
610 return NULL_TREE;
611
612 /* Don't try to inline functions that are not well-suited to
613 inlining. */
614 if (!inlinable_function_p (fn, id))
615 return NULL_TREE;
616
617 /* Set the current filename and line number to the function we are
618 inlining so that when we create new _STMT nodes here they get
619 line numbers corresponding to the function we are calling. We
620 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
621 because individual statements don't record the filename. */
622 push_srcloc (fn->decl.filename, fn->decl.linenum);
623
624 /* Build a statement-expression containing code to initialize the
625 arguments, the actual inline expansion of the body, and a label
626 for the return statements within the function to jump to. The
627 type of the statement expression is the return type of the
628 function call. */
629 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
630
631 /* Local declarations will be replaced by their equivalents in this
632 map. */
633 st = id->decl_map;
634 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
635 NULL, NULL);
636
637 /* Initialize the parameters. */
638 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
639 /* Expand any inlined calls in the initializers. Do this before we
640 push FN on the stack of functions we are inlining; we want to
641 inline calls to FN that appear in the initializers for the
642 parameters. */
643 expand_calls_inline (&arg_inits, id);
644 /* And add them to the tree. */
645 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
646
647 /* Record the function we are about to inline so that we can avoid
648 recursing into it. */
649 VARRAY_PUSH_TREE (id->fns, fn);
650
651 /* Return statements in the function body will be replaced by jumps
652 to the RET_LABEL. */
653 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
654 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
655
656 /* Create a block to put the parameters in. We have to do this
657 after the parameters have been remapped because remapping
658 parameters is different from remapping ordinary variables. */
659 scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
660 SCOPE_BEGIN_P (scope_stmt) = 1;
661 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
662 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
663 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
664 STMT_EXPR_STMT (expr) = scope_stmt;
665
666 /* Tell the debugging backends that this block represents the
667 outermost scope of the inlined function. */
668 if (SCOPE_STMT_BLOCK (scope_stmt))
669 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
670
671 /* Declare the return variable for the function. */
672 STMT_EXPR_STMT (expr)
673 = chainon (STMT_EXPR_STMT (expr),
674 declare_return_variable (id, &use_stmt));
675
676 /* After we've initialized the parameters, we insert the body of the
677 function itself. */
678 STMT_EXPR_STMT (expr)
679 = chainon (STMT_EXPR_STMT (expr), copy_body (id));
680
681 /* Close the block for the parameters. */
682 scope_stmt = build_min_nt (SCOPE_STMT, DECL_INITIAL (fn));
683 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
684 my_friendly_assert (DECL_INITIAL (fn)
685 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
686 19991203);
687 remap_block (scope_stmt, NULL_TREE, id);
688 STMT_EXPR_STMT (expr)
689 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
690
691 /* After the body of the function comes the RET_LABEL. This must come
692 before we evaluate the returned value below, because that evalulation
693 may cause RTL to be generated. */
694 STMT_EXPR_STMT (expr)
695 = chainon (STMT_EXPR_STMT (expr),
696 build_min_nt (LABEL_STMT, id->ret_label));
697
698 /* Finally, mention the returned value so that the value of the
699 statement-expression is the returned value of the function. */
700 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
701
702 /* Clean up. */
703 splay_tree_delete (id->decl_map);
704 id->decl_map = st;
705
706 /* The new expression has side-effects if the old one did. */
707 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
708
709 /* Replace the call by the inlined body. Wrap it in an
710 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
711 pointing to the right place. */
712 chain = TREE_CHAIN (*tp);
713 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
714 /*col=*/0);
715 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
716 TREE_CHAIN (*tp) = chain;
717 pop_srcloc ();
718
719 /* If the value of the new expression is ignored, that's OK. We
720 don't warn about this for CALL_EXPRs, so we shouldn't warn about
721 the equivalent inlined version either. */
722 TREE_USED (*tp) = 1;
723
724 /* Recurse into the body of the just inlined function. */
725 expand_calls_inline (tp, id);
726 VARRAY_POP (id->fns);
727
728 /* Don't walk into subtrees. We've already handled them above. */
729 *walk_subtrees = 0;
730
731 /* Keep iterating. */
732 return NULL_TREE;
733 }
734
735 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
736 expansions as appropriate. */
737
738 static void
739 expand_calls_inline (tp, id)
740 tree *tp;
741 inline_data *id;
742 {
743 /* Search through *TP, replacing all calls to inline functions by
744 appropriate equivalents. */
745 walk_tree (tp, expand_call_inline, id);
746 }
747
748 /* Optimize the body of FN. */
749
750 void
751 optimize_function (fn)
752 tree fn;
753 {
754 /* Expand calls to inline functions. */
755 if (flag_inline_trees)
756 {
757 inline_data id;
758 tree prev_fn;
759 struct saved_scope *s;
760
761 /* Clear out ID. */
762 memset (&id, 0, sizeof (id));
763
764 /* Don't allow recursion into FN. */
765 VARRAY_TREE_INIT (id.fns, 32, "fns");
766 VARRAY_PUSH_TREE (id.fns, fn);
767 /* Or any functions that aren't finished yet. */
768 prev_fn = NULL_TREE;
769 if (current_function_decl)
770 {
771 VARRAY_PUSH_TREE (id.fns, current_function_decl);
772 prev_fn = current_function_decl;
773 }
774 for (s = scope_chain; s; s = s->prev)
775 if (s->function_decl && s->function_decl != prev_fn)
776 {
777 VARRAY_PUSH_TREE (id.fns, s->function_decl);
778 prev_fn = s->function_decl;
779 }
780
781 /* Replace all calls to inline functions with the bodies of those
782 functions. */
783 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
784
785 /* Clean up. */
786 VARRAY_FREE (id.fns);
787 }
788 }
789
790 /* Called from calls_setjmp_p via walk_tree. */
791
792 static tree
793 calls_setjmp_r (tp, walk_subtrees, data)
794 tree *tp;
795 int *walk_subtrees ATTRIBUTE_UNUSED;
796 void *data ATTRIBUTE_UNUSED;
797 {
798 /* We're only interested in FUNCTION_DECLS. */
799 if (TREE_CODE (*tp) != FUNCTION_DECL)
800 return NULL_TREE;
801
802 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
803 }
804
805 /* Returns non-zero if FN calls `setjmp' or some other function that
806 can return more than once. This function is conservative; it may
807 occasionally return a non-zero value even when FN does not actually
808 call `setjmp'. */
809
810 int
811 calls_setjmp_p (fn)
812 tree fn;
813 {
814 return (walk_tree (&DECL_SAVED_TREE (fn), calls_setjmp_r, NULL)
815 != NULL_TREE);
816 }
817
818 /* FN is a function that has a complete body. Clone the body as
819 necessary. Returns non-zero if there's no longer any need to
820 process the main body. */
821
822 int
823 maybe_clone_body (fn)
824 tree fn;
825 {
826 inline_data id;
827 tree clone;
828
829 /* We don't clone constructors and destructors under the old ABI. */
830 if (!flag_new_abi)
831 return 0;
832
833 /* We only clone constructors and destructors. */
834 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
835 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
836 return 0;
837
838 /* We know that any clones immediately follow FN in the TYPE_METHODS
839 list. */
840 for (clone = TREE_CHAIN (fn);
841 clone && DECL_CLONED_FUNCTION_P (clone);
842 clone = TREE_CHAIN (clone))
843 {
844 tree parm;
845 tree clone_parm;
846 int parmno;
847
848 /* Update CLONE's source position information to match FN's. */
849 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
850 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
851
852 /* Start processing the function. */
853 push_to_top_level ();
854 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
855 store_parm_decls ();
856
857 /* Just clone the body, as if we were making an inline call.
858 But, remap the parameters in the callee to the parameters of
859 caller. If there's an in-charge parameter, map it to an
860 appropriate constant. */
861 memset (&id, 0, sizeof (id));
862 VARRAY_TREE_INIT (id.fns, 2, "fns");
863 VARRAY_PUSH_TREE (id.fns, clone);
864 VARRAY_PUSH_TREE (id.fns, fn);
865
866 /* Remap the parameters. */
867 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
868 NULL, NULL);
869 for (parmno = 0,
870 parm = DECL_ARGUMENTS (fn),
871 clone_parm = DECL_ARGUMENTS (clone);
872 parm;
873 ++parmno,
874 parm = TREE_CHAIN (parm))
875 {
876 /* Map the in-charge parameter to an appropriate constant. */
877 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
878 {
879 tree in_charge;
880 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
881 splay_tree_insert (id.decl_map,
882 (splay_tree_key) parm,
883 (splay_tree_key) in_charge);
884 }
885 /* Map other parameters to their equivalents in the cloned
886 function. */
887 else
888 {
889 splay_tree_insert (id.decl_map,
890 (splay_tree_key) parm,
891 (splay_tree_value) clone_parm);
892 clone_parm = TREE_CHAIN (clone_parm);
893 }
894 }
895
896 /* Actually copy the body. */
897 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
898
899 /* Clean up. */
900 splay_tree_delete (id.decl_map);
901 VARRAY_FREE (id.fns);
902
903 /* Now, expand this function into RTL, if appropriate. */
904 current_function_name_declared = 1;
905 expand_body (finish_function (0));
906 pop_from_top_level ();
907 }
908
909 /* We don't need to process the original function any further. */
910 return 1;
911 }