gcc/tree-inline.c
1 /* Control and data flow functions for trees.
2 Copyright 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41
42 /* This should eventually be generalized to other languages, but
43 this would require a shared function-as-trees infrastructure. */
44 #ifndef INLINER_FOR_JAVA
45 #include "c-common.h"
46 #else /* INLINER_FOR_JAVA */
47 #include "parse.h"
48 #include "java-tree.h"
49 #endif /* INLINER_FOR_JAVA */
50
51 /* 0 if we should not perform inlining.
52 1 if we should expand function calls inline at the tree level.
53 2 if we should consider *all* functions to be inline
54 candidates. */
55
56 int flag_inline_trees = 0;
57
58 /* To Do:
59
60 o In order to make inlining-on-trees work, we pessimized
61 function-local static constants. In particular, they are now
62 always output, even when not addressed. Fix this by treating
63 function-local static constants just like global static
64 constants; the back-end already knows not to output them if they
65 are not needed.
66
67 o Provide heuristics to clamp inlining of recursive template
68 calls? */
69
70 /* Data required for function inlining. */
71
72 typedef struct inline_data
73 {
74 /* A stack of the functions we are inlining. For example, if we are
75 compiling `f', which calls `g', which calls `h', and we are
76 inlining the body of `h', the stack will contain, `h', followed
77 by `g', followed by `f'. The first few elements of the stack may
78 contain other functions that we know we should not recurse into,
79 even though they are not directly being inlined. */
80 varray_type fns;
81 /* The index of the first element of FNS that really represents an
82 inlined function. */
83 unsigned first_inlined_fn;
84 /* The label to jump to when a return statement is encountered. If
85 this value is NULL, then return statements will simply be
86 remapped as return statements, rather than as jumps. */
87 tree ret_label;
88 /* The map from local declarations in the inlined function to
89 equivalents in the function into which it is being inlined. */
90 splay_tree decl_map;
91 /* Nonzero if we are currently within the cleanup for a
92 TARGET_EXPR. */
93 int in_target_cleanup_p;
94 /* A list of the functions the current function has inlined. */
95 varray_type inlined_fns;
96 /* The approximate number of instructions we have inlined in the
97 current call stack. */
98 int inlined_insns;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
103 bool cloning_p;
104 /* Hash table used to prevent walk_tree from visiting the same node
105 umpteen million times. */
106 htab_t tree_pruner;
107 /* Decl of function we are inlining into. */
108 tree decl;
109 tree current_decl;
110 } inline_data;
111
112 /* Prototypes. */
113
114 static tree declare_return_variable (inline_data *, tree, tree *);
115 static tree copy_body_r (tree *, int *, void *);
116 static tree copy_body (inline_data *);
117 static tree expand_call_inline (tree *, int *, void *);
118 static void expand_calls_inline (tree *, inline_data *);
119 static bool inlinable_function_p (tree);
120 static int limits_allow_inlining (tree, inline_data *);
121 static tree remap_decl (tree, inline_data *);
122 #ifndef INLINER_FOR_JAVA
123 static tree initialize_inlined_parameters (inline_data *, tree, tree);
124 static void remap_block (tree, tree, inline_data *);
125 static void copy_scope_stmt (tree *, int *, inline_data *);
126 #else /* INLINER_FOR_JAVA */
127 static tree initialize_inlined_parameters (inline_data *, tree, tree, tree);
128 static void remap_block (tree *, tree, inline_data *);
129 static tree add_stmt_to_compound (tree, tree, tree);
130 #endif /* INLINER_FOR_JAVA */
131
132 /* Remap DECL during the copying of the BLOCK tree for the function. */
133
134 static tree
135 remap_decl (tree decl, inline_data *id)
136 {
137 splay_tree_node n;
138 tree fn;
139
140 /* We only remap local variables in the current function. */
141 fn = VARRAY_TOP_TREE (id->fns);
142 if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
143 return NULL_TREE;
144
145 /* See if we have remapped this declaration. */
146 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
147 /* If we didn't already have an equivalent for this declaration,
148 create one now. */
149 if (!n)
150 {
151 tree t;
152
153 /* Make a copy of the variable or label. */
154 t = copy_decl_for_inlining (decl, fn,
155 VARRAY_TREE (id->fns, 0));
156
157 /* The decl T could be a dynamic array or other variable-sized type,
158 in which case some fields need to be remapped because they may
159 contain SAVE_EXPRs. */
160 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
161 && TYPE_DOMAIN (TREE_TYPE (t)))
162 {
163 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
164 TYPE_DOMAIN (TREE_TYPE (t))
165 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
166 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
167 copy_body_r, id, NULL);
168 }
169
170 #ifndef INLINER_FOR_JAVA
171 if (! DECL_NAME (t) && TREE_TYPE (t)
172 && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
173 {
174 /* For a VAR_DECL of anonymous type, we must also copy the
175 member VAR_DECLS here and rechain the
176 DECL_ANON_UNION_ELEMS. */
177 tree members = NULL;
178 tree src;
179
180 for (src = DECL_ANON_UNION_ELEMS (t); src;
181 src = TREE_CHAIN (src))
182 {
183 tree member = remap_decl (TREE_VALUE (src), id);
184
185 if (TREE_PURPOSE (src))
186 abort ();
187 members = tree_cons (NULL, member, members);
188 }
189 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
190 }
191 #endif /* not INLINER_FOR_JAVA */
192
193 /* Remember it, so that if we encounter this local entity
194 again we can reuse this copy. */
195 n = splay_tree_insert (id->decl_map,
196 (splay_tree_key) decl,
197 (splay_tree_value) t);
198 }
199
200 return (tree) n->value;
201 }
202
203 #ifndef INLINER_FOR_JAVA
204 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
205 remapped versions of the variables therein. And hook the new block
206 into the block-tree. If non-NULL, DECLS are the declarations to
207 use instead of the BLOCK_VARS in the old block. */
208 #else /* INLINER_FOR_JAVA */
209 /* Copy the BLOCK to contain remapped versions of the variables
210 therein. And hook the new block into the block-tree. */
211 #endif /* INLINER_FOR_JAVA */
212
213 static void
214 #ifndef INLINER_FOR_JAVA
215 remap_block (tree scope_stmt, tree decls, inline_data *id)
216 #else /* INLINER_FOR_JAVA */
217 remap_block (tree *block, tree decls, inline_data *id)
218 #endif /* INLINER_FOR_JAVA */
219 {
220 #ifndef INLINER_FOR_JAVA
221 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
222 not know whether or not expand_expr will actually write out the
223 code we put there. If it does not, then we'll have more BLOCKs
224 than block-notes, and things will go awry. At some point, we
225 should make the back-end handle BLOCK notes in a tidier way,
226 without requiring a strict correspondence to the block-tree; then
227 this check can go. */
228 if (id->in_target_cleanup_p)
229 {
230 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
231 return;
232 }
233
234 /* If this is the beginning of a scope, remap the associated BLOCK. */
235 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
236 {
237 tree old_block;
238 tree new_block;
239 tree old_var;
240 tree fn;
241
242 /* Make the new block. */
243 old_block = SCOPE_STMT_BLOCK (scope_stmt);
244 new_block = make_node (BLOCK);
245 TREE_USED (new_block) = TREE_USED (old_block);
246 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
247 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
248
249 /* Remap its variables. */
250 for (old_var = decls ? decls : BLOCK_VARS (old_block);
251 old_var;
252 old_var = TREE_CHAIN (old_var))
253 {
254 tree new_var;
255
256 /* Remap the variable. */
257 new_var = remap_decl (old_var, id);
258 /* If we didn't remap this variable, we can't mess with
259 its TREE_CHAIN. If we remapped this variable to
260 something other than a declaration (say, if we mapped it
261 to a constant), then we must similarly omit any mention
262 of it here. */
263 if (!new_var || !DECL_P (new_var))
264 ;
265 else
266 {
267 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
268 BLOCK_VARS (new_block) = new_var;
269 }
270 }
271 /* We put the BLOCK_VARS in reverse order; fix that now. */
272 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
273 fn = VARRAY_TREE (id->fns, 0);
274 if (id->cloning_p)
275 /* We're building a clone; DECL_INITIAL is still
276 error_mark_node, and current_binding_level is the parm
277 binding level. */
278 (*lang_hooks.decls.insert_block) (new_block);
279 else
280 {
281 /* Attach this new block after the DECL_INITIAL block for the
282 function into which this block is being inlined. In
283 rest_of_compilation we will straighten out the BLOCK tree. */
284 tree *first_block;
285 if (DECL_INITIAL (fn))
286 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
287 else
288 first_block = &DECL_INITIAL (fn);
289 BLOCK_CHAIN (new_block) = *first_block;
290 *first_block = new_block;
291 }
292 /* Remember the remapped block. */
293 splay_tree_insert (id->decl_map,
294 (splay_tree_key) old_block,
295 (splay_tree_value) new_block);
296 }
297 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
298 remapped block. */
299 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
300 {
301 splay_tree_node n;
302
303 /* Find this block in the table of remapped things. */
304 n = splay_tree_lookup (id->decl_map,
305 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
306 if (! n)
307 abort ();
308 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
309 }
310 #else /* INLINER_FOR_JAVA */
311 tree old_block;
312 tree new_block;
313 tree old_var;
314 tree fn;
315
316 /* Make the new block. */
317 old_block = *block;
318 new_block = make_node (BLOCK);
319 TREE_USED (new_block) = TREE_USED (old_block);
320 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
321 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (old_block);
322 TREE_SIDE_EFFECTS (new_block) = TREE_SIDE_EFFECTS (old_block);
323 TREE_TYPE (new_block) = TREE_TYPE (old_block);
324 *block = new_block;
325
326 /* Remap its variables. */
327 for (old_var = decls ? decls : BLOCK_VARS (old_block);
328 old_var;
329 old_var = TREE_CHAIN (old_var))
330 {
331 tree new_var;
332
333 /* All local class initialization flags go in the outermost
334 scope. */
335 if (LOCAL_CLASS_INITIALIZATION_FLAG_P (old_var))
336 {
337 /* We may already have one. */
338 if (! splay_tree_lookup (id->decl_map, (splay_tree_key) old_var))
339 {
340 tree outermost_block;
341 new_var = remap_decl (old_var, id);
342 DECL_ABSTRACT_ORIGIN (new_var) = NULL;
343 outermost_block = DECL_SAVED_TREE (current_function_decl);
344 TREE_CHAIN (new_var) = BLOCK_VARS (outermost_block);
345 BLOCK_VARS (outermost_block) = new_var;
346 }
347 continue;
348 }
349
350 /* Remap the variable. */
351 new_var = remap_decl (old_var, id);
352 /* If we didn't remap this variable, we can't mess with
353 its TREE_CHAIN. If we remapped this variable to
354 something other than a declaration (say, if we mapped it
355 to a constant), then we must similarly omit any mention
356 of it here. */
357 if (!new_var || !DECL_P (new_var))
358 ;
359 else
360 {
361 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
362 BLOCK_VARS (new_block) = new_var;
363 }
364 }
365 /* We put the BLOCK_VARS in reverse order; fix that now. */
366 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
367 fn = VARRAY_TREE (id->fns, 0);
368 /* Remember the remapped block. */
369 splay_tree_insert (id->decl_map,
370 (splay_tree_key) old_block,
371 (splay_tree_value) new_block);
372 #endif /* INLINER_FOR_JAVA */
373 }
374
375 #ifndef INLINER_FOR_JAVA
376 /* Copy the SCOPE_STMT pointed to by TP. */
377
378 static void
379 copy_scope_stmt (tree *tp, int *walk_subtrees, inline_data *id)
380 {
381 tree block;
382
383 /* Remember whether or not this statement was nullified. When
384 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
385 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
386 deal with copying BLOCKs if they do not wish to do so. */
387 block = SCOPE_STMT_BLOCK (*tp);
388 /* Copy (and replace) the statement. */
389 copy_tree_r (tp, walk_subtrees, NULL);
390 /* Restore the SCOPE_STMT_BLOCK. */
391 SCOPE_STMT_BLOCK (*tp) = block;
392
393 /* Remap the associated block. */
394 remap_block (*tp, NULL_TREE, id);
395 }
396 #endif /* not INLINER_FOR_JAVA */
397
398 /* Called from copy_body via walk_tree. DATA is really an
399 `inline_data *'. */
400 static tree
401 copy_body_r (tree *tp, int *walk_subtrees, void *data)
402 {
403 inline_data* id;
404 tree fn;
405
406 /* Set up. */
407 id = (inline_data *) data;
408 fn = VARRAY_TOP_TREE (id->fns);
409
410 #if 0
411 /* All automatic variables should have a DECL_CONTEXT indicating
412 what function they come from. */
413 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
414 && DECL_NAMESPACE_SCOPE_P (*tp))
415 if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
416 abort ();
417 #endif
418
419 #ifdef INLINER_FOR_JAVA
420 if (TREE_CODE (*tp) == BLOCK)
421 remap_block (tp, NULL_TREE, id);
422 #endif
423
424 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
425 GOTO_STMT with the RET_LABEL as its target. */
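/* A rough illustration for the C/C++ path (names are hypothetical):
   a statement whose RETURN_STMT_EXPR is "<retval> = a + b" is
   rewritten as

     <retval> = a + b;     (an EXPR_STMT)
     goto <ret_label>;     (a GOTO_STMT with GOTO_FAKE_P set)

   so that control reaches the LABEL_STMT that expand_call_inline
   emits after the inlined body.  A plain "return;" becomes just the
   goto. */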
426 #ifndef INLINER_FOR_JAVA
427 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
428 #else /* INLINER_FOR_JAVA */
429 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
430 #endif /* INLINER_FOR_JAVA */
431 {
432 tree return_stmt = *tp;
433 tree goto_stmt;
434
435 /* Build the GOTO_STMT. */
436 #ifndef INLINER_FOR_JAVA
437 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
438 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
439 GOTO_FAKE_P (goto_stmt) = 1;
440 #else /* INLINER_FOR_JAVA */
441 tree assignment = TREE_OPERAND (return_stmt, 0);
442 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
443 TREE_SIDE_EFFECTS (goto_stmt) = 1;
444 #endif /* INLINER_FOR_JAVA */
445
446 /* If we're returning something, just turn that into an
447 assignment to the equivalent of the original
448 RESULT_DECL. */
449 #ifndef INLINER_FOR_JAVA
450 if (RETURN_STMT_EXPR (return_stmt))
451 {
452 *tp = build_stmt (EXPR_STMT,
453 RETURN_STMT_EXPR (return_stmt));
454 STMT_IS_FULL_EXPR_P (*tp) = 1;
455 /* And then jump to the end of the function. */
456 TREE_CHAIN (*tp) = goto_stmt;
457 }
458 #else /* INLINER_FOR_JAVA */
459 if (assignment)
460 {
461 copy_body_r (&assignment, walk_subtrees, data);
462 *tp = build (COMPOUND_EXPR, void_type_node, assignment, goto_stmt);
463 TREE_SIDE_EFFECTS (*tp) = 1;
464 }
465 #endif /* INLINER_FOR_JAVA */
466 /* If we're not returning anything just do the jump. */
467 else
468 *tp = goto_stmt;
469 }
470 /* Local variables and labels need to be replaced by equivalent
471 variables. We don't want to copy static variables; there's only
472 one of those, no matter how many times we inline the containing
473 function. */
474 else if ((*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
475 {
476 tree new_decl;
477
478 /* Remap the declaration. */
479 new_decl = remap_decl (*tp, id);
480 if (! new_decl)
481 abort ();
482 /* Replace this variable with the copy. */
483 STRIP_TYPE_NOPS (new_decl);
484 *tp = new_decl;
485 }
486 #if 0
487 else if (nonstatic_local_decl_p (*tp)
488 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
489 abort ();
490 #endif
491 else if (TREE_CODE (*tp) == SAVE_EXPR)
492 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
493 walk_subtrees);
494 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
495 /* UNSAVE_EXPRs should not be generated until expansion time. */
496 abort ();
497 #ifndef INLINER_FOR_JAVA
498 /* For a SCOPE_STMT, we must copy the associated block so that we
499 can write out debugging information for the inlined variables. */
500 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
501 copy_scope_stmt (tp, walk_subtrees, id);
502 #else /* INLINER_FOR_JAVA */
503 else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
504 {
505 /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
506 will refer to it, so save a copy ready for remapping. We
507 save it in the decl_map, although it isn't a decl. */
508 tree new_block = copy_node (*tp);
509 splay_tree_insert (id->decl_map,
510 (splay_tree_key) *tp,
511 (splay_tree_value) new_block);
512 *tp = new_block;
513 }
514 else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
515 {
516 splay_tree_node n
517 = splay_tree_lookup (id->decl_map,
518 (splay_tree_key) TREE_OPERAND (*tp, 0));
519 /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
520 if (! n)
521 abort ();
522 *tp = copy_node (*tp);
523 TREE_OPERAND (*tp, 0) = (tree) n->value;
524 }
525 #endif /* INLINER_FOR_JAVA */
526 /* Otherwise, just copy the node. Note that copy_tree_r already
527 knows not to copy VAR_DECLs, etc., so this is safe. */
528 else
529 {
530 if (TREE_CODE (*tp) == MODIFY_EXPR
531 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
532 && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
533 (TREE_OPERAND (*tp, 0), fn)))
534 {
535 /* Some assignments VAR = VAR; don't generate any rtl code
536 and thus don't count as variable modification. Avoid
537 keeping bogosities like 0 = 0. */
538 tree decl = TREE_OPERAND (*tp, 0), value;
539 splay_tree_node n;
540
541 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
542 if (n)
543 {
544 value = (tree) n->value;
545 STRIP_TYPE_NOPS (value);
546 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
547 {
548 *tp = value;
549 return copy_body_r (tp, walk_subtrees, data);
550 }
551 }
552 }
553 else if (TREE_CODE (*tp) == ADDR_EXPR
554 && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
555 (TREE_OPERAND (*tp, 0), fn)))
556 {
557 /* Get rid of &* from inline substitutions. It can occur when
558 someone takes the address of a parm or return slot passed by
559 invisible reference. */
560 tree decl = TREE_OPERAND (*tp, 0), value;
561 splay_tree_node n;
562
563 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
564 if (n)
565 {
566 value = (tree) n->value;
567 if (TREE_CODE (value) == INDIRECT_REF)
568 {
569 *tp = convert (TREE_TYPE (*tp), TREE_OPERAND (value, 0));
570 return copy_body_r (tp, walk_subtrees, data);
571 }
572 }
573 }
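/* Illustrative case (hypothetical names): if parameter "p" was passed
   by invisible reference, initialize_inlined_parameters mapped it to
   the INDIRECT_REF "*p.7" of a new pointer VAR_DECL.  An ADDR_EXPR
   "&p" in the body would otherwise copy as "&*p.7"; the code above
   folds it straight to "p.7", converted to the expected pointer
   type. */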
574
575 copy_tree_r (tp, walk_subtrees, NULL);
576
577 /* The copied TARGET_EXPR has never been expanded, even if the
578 original node was expanded already. */
579 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
580 {
581 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
582 TREE_OPERAND (*tp, 3) = NULL_TREE;
583 }
584 }
585
586 /* Keep iterating. */
587 return NULL_TREE;
588 }
589
590 /* Make a copy of the body of FN so that it can be inserted inline in
591 another function. */
592
593 static tree
594 copy_body (inline_data *id)
595 {
596 tree body;
597
598 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
599 walk_tree (&body, copy_body_r, id, NULL);
600
601 return body;
602 }
603
604 /* Generate code to initialize the parameters of the function at the
605 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
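/* A minimal sketch of the effect for the C/C++ path (hypothetical names):
   for a call "foo (x + 1, 3)" to "int foo (int a, const int b)", the
   prologue built here is roughly

     int a.1 = x + 1;   (a DECL_STMT whose VAR_DECL carries DECL_INITIAL)

   while for the read-only parameter "b" the constant argument 3 gets no
   variable at all: "b" is simply mapped to 3 in ID->decl_map, so the
   copied body uses the constant directly.  Parameters of types that need
   constructing instead get a separate EXPR_STMT wrapping an INIT_EXPR,
   plus a CLEANUP_STMT when the language hook supplies a cleanup. */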
606
607 static tree
608 #ifndef INLINER_FOR_JAVA
609 initialize_inlined_parameters (inline_data *id, tree args, tree fn)
610 #else /* INLINER_FOR_JAVA */
611 initialize_inlined_parameters (inline_data *id, tree args, tree fn, tree block)
612 #endif /* INLINER_FOR_JAVA */
613 {
614 tree init_stmts;
615 tree parms;
616 tree a;
617 tree p;
618 #ifdef INLINER_FOR_JAVA
619 tree vars = NULL_TREE;
620 #endif /* INLINER_FOR_JAVA */
621
622 /* Figure out what the parameters are. */
623 parms = DECL_ARGUMENTS (fn);
624
625 /* Start with no initializations whatsoever. */
626 init_stmts = NULL_TREE;
627
628 /* Loop through the parameter declarations, replacing each with an
629 equivalent VAR_DECL, appropriately initialized. */
630 for (p = parms, a = args; p;
631 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
632 {
633 #ifndef INLINER_FOR_JAVA
634 tree init_stmt;
635 tree cleanup;
636 #endif /* not INLINER_FOR_JAVA */
637 tree var;
638 tree value;
639 tree var_sub;
640
641 /* Find the initializer. */
642 value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
643 (p, a ? TREE_VALUE (a) : NULL_TREE, fn);
644
645 /* If the parameter is never assigned to, we may not need to
646 create a new variable here at all. Instead, we may be able
647 to just use the argument value. */
648 if (TREE_READONLY (p)
649 && !TREE_ADDRESSABLE (p)
650 && value && !TREE_SIDE_EFFECTS (value))
651 {
652 /* Simplify the value, if possible. */
653 value = fold (DECL_P (value) ? decl_constant_value (value) : value);
654
655 /* We can't risk substituting complex expressions. They
656 might contain variables that will be assigned to later.
657 Theoretically, we could check the expression to see if
658 all of the variables that determine its value are
659 read-only, but we don't bother. */
660 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
661 {
662 /* If this is a declaration, wrap it in a NOP_EXPR so that
663 we don't try to put the VALUE on the list of
664 BLOCK_VARS. */
665 if (DECL_P (value))
666 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
667
668 /* If this is a constant, make sure it has the right type. */
669 else if (TREE_TYPE (value) != TREE_TYPE (p))
670 value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));
671
672 splay_tree_insert (id->decl_map,
673 (splay_tree_key) p,
674 (splay_tree_value) value);
675 continue;
676 }
677 }
678
679 /* Make an equivalent VAR_DECL. */
680 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
681
682 /* See if the frontend wants to pass this by invisible reference. If
683 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
684 replace uses of the PARM_DECL with dereferences. */
685 if (TREE_TYPE (var) != TREE_TYPE (p)
686 && POINTER_TYPE_P (TREE_TYPE (var))
687 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
688 var_sub = build1 (INDIRECT_REF, TREE_TYPE (p), var);
689 else
690 var_sub = var;
691
692 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
693 that way, when the PARM_DECL is encountered, it will be
694 automatically replaced by the VAR_DECL. */
695 splay_tree_insert (id->decl_map,
696 (splay_tree_key) p,
697 (splay_tree_value) var_sub);
698
699 /* Declare this new variable. */
700 #ifndef INLINER_FOR_JAVA
701 init_stmt = build_stmt (DECL_STMT, var);
702 TREE_CHAIN (init_stmt) = init_stmts;
703 init_stmts = init_stmt;
704 #else /* INLINER_FOR_JAVA */
705 TREE_CHAIN (var) = vars;
706 vars = var;
707 #endif /* INLINER_FOR_JAVA */
708
709 /* Initialize this VAR_DECL from the equivalent argument. If
710 the argument is an object, created via a constructor or copy,
711 this will not result in an extra copy: the TARGET_EXPR
712 representing the argument will be bound to VAR, and the
713 object will be constructed in VAR. */
714 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
715 #ifndef INLINER_FOR_JAVA
716 DECL_INITIAL (var) = value;
717 else
718 {
719 /* Even if P was TREE_READONLY, the new VAR should not be.
720 In the original code, we would have constructed a
721 temporary, and then the function body would have never
722 changed the value of P. However, now, we will be
723 constructing VAR directly. The constructor body may
724 change its value multiple times as it is being
725 constructed. Therefore, it must not be TREE_READONLY;
726 the back-end assumes that a TREE_READONLY variable is
727 assigned to only once. */
728 TREE_READONLY (var) = 0;
729
730 /* Build a run-time initialization. */
731 init_stmt = build_stmt (EXPR_STMT,
732 build (INIT_EXPR, TREE_TYPE (p),
733 var, value));
734 /* Add this initialization to the list. Note that we want the
735 declaration *after* the initialization because we are going
736 to reverse all the initialization statements below. */
737 TREE_CHAIN (init_stmt) = init_stmts;
738 init_stmts = init_stmt;
739 }
740
741 /* See if we need to clean up the declaration. */
742 cleanup = (*lang_hooks.maybe_build_cleanup) (var);
743 if (cleanup)
744 {
745 tree cleanup_stmt;
746 /* Build the cleanup statement. */
747 cleanup_stmt = build_stmt (CLEANUP_STMT, var, cleanup);
748 /* Add it to the *front* of the list; the list will be
749 reversed below. */
750 TREE_CHAIN (cleanup_stmt) = init_stmts;
751 init_stmts = cleanup_stmt;
752 }
753 #else /* INLINER_FOR_JAVA */
754 {
755 tree assignment = build (MODIFY_EXPR, TREE_TYPE (p), var, value);
756 init_stmts = add_stmt_to_compound (init_stmts, TREE_TYPE (p),
757 assignment);
758 }
759 else
760 {
761 /* Java objects don't ever need constructing when being
762 passed as arguments because only call by reference is
763 supported. */
764 abort ();
765 }
766 #endif /* INLINER_FOR_JAVA */
767 }
768
769 #ifndef INLINER_FOR_JAVA
770 /* Evaluate trailing arguments. */
771 for (; a; a = TREE_CHAIN (a))
772 {
773 tree init_stmt;
774 tree value = TREE_VALUE (a);
775
776 if (! value || ! TREE_SIDE_EFFECTS (value))
777 continue;
778
779 init_stmt = build_stmt (EXPR_STMT, value);
780 TREE_CHAIN (init_stmt) = init_stmts;
781 init_stmts = init_stmt;
782 }
783
784 /* The initialization statements have been built up in reverse
785 order. Straighten them out now. */
786 return nreverse (init_stmts);
787 #else /* INLINER_FOR_JAVA */
788 BLOCK_VARS (block) = nreverse (vars);
789 return init_stmts;
790 #endif /* INLINER_FOR_JAVA */
791 }
792
793 /* Declare a return variable to replace the RESULT_DECL for the
794 function we are calling. An appropriate DECL_STMT is returned.
795 The USE_STMT is filled in to contain a use of the declaration to
796 indicate the return value of the function. */
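/* Sketch for the C/C++ path (hypothetical names): for "int f (void)" the
   RESULT_DECL is replaced by a fresh VAR_DECL, say <retval.3>; the value
   returned by this function is the DECL_STMT declaring it, and *USE_STMT
   becomes an EXPR_STMT mentioning <retval.3> (wrapped in a NOP_EXPR back
   to the declared return type if it was promoted).  When the callee
   returns in a caller-provided return slot, no declaration is needed and
   NULL_TREE is returned instead. */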
797
798 #ifndef INLINER_FOR_JAVA
799 static tree
800 declare_return_variable (struct inline_data *id, tree return_slot_addr,
801 tree *use_stmt)
802 #else /* INLINER_FOR_JAVA */
803 static tree
804 declare_return_variable (struct inline_data *id, tree return_slot_addr,
805 tree *var)
806 #endif /* INLINER_FOR_JAVA */
807 {
808 tree fn = VARRAY_TOP_TREE (id->fns);
809 tree result = DECL_RESULT (fn);
810 #ifndef INLINER_FOR_JAVA
811 tree var;
812 #endif /* not INLINER_FOR_JAVA */
813 int need_return_decl = 1;
814
815 /* We don't need to do anything for functions that don't return
816 anything. */
817 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
818 {
819 #ifndef INLINER_FOR_JAVA
820 *use_stmt = NULL_TREE;
821 #else /* INLINER_FOR_JAVA */
822 *var = NULL_TREE;
823 #endif /* INLINER_FOR_JAVA */
824 return NULL_TREE;
825 }
826
827 #ifndef INLINER_FOR_JAVA
828 var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
829 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
830 &need_return_decl, return_slot_addr));
831
832 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
833 way, when the RESULT_DECL is encountered, it will be
834 automatically replaced by the VAR_DECL. */
835 splay_tree_insert (id->decl_map,
836 (splay_tree_key) result,
837 (splay_tree_value) var);
838
839 /* Build the USE_STMT. If the return type of the function was
840 promoted, convert it back to the expected type. */
841 if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
842 *use_stmt = build_stmt (EXPR_STMT, var);
843 else
844 *use_stmt = build_stmt (EXPR_STMT,
845 build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)),
846 var));
847 TREE_ADDRESSABLE (*use_stmt) = 1;
848
849 /* Build the declaration statement if FN does not return an
850 aggregate. */
851 if (need_return_decl)
852 return build_stmt (DECL_STMT, var);
853 #else /* INLINER_FOR_JAVA */
854 *var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
855 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
856 &need_return_decl, return_slot_addr));
857
858 splay_tree_insert (id->decl_map,
859 (splay_tree_key) result,
860 (splay_tree_value) *var);
861 DECL_IGNORED_P (*var) = 1;
862 if (need_return_decl)
863 return *var;
864 #endif /* INLINER_FOR_JAVA */
865 /* If FN does return an aggregate, there's no need to declare the
866 return variable; we're using a variable in our caller's frame. */
867 else
868 return NULL_TREE;
869 }
870
871 /* Returns nonzero if a function can be inlined as a tree. */
872
873 bool
874 tree_inlinable_function_p (tree fn)
875 {
876 return inlinable_function_p (fn);
877 }
878
879 static const char *inline_forbidden_reason;
880
881 static tree
882 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
883 void *fn)
884 {
885 tree node = *nodep;
886 tree t;
887
888 switch (TREE_CODE (node))
889 {
890 case CALL_EXPR:
891 /* Refuse to inline an alloca call unless the user has explicitly forced it,
892 as this may change the program's memory overhead drastically when the
893 function using alloca is called in a loop.  In the copy of GCC that is part
894 of SPEC2000, inlining into schedule_block caused it to require 2GB of RAM instead of 256MB. */
895 if (alloca_call_p (node)
896 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
897 {
898 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
899 "because it uses alloca (override using "
900 "the always_inline attribute)";
901 return node;
902 }
903 t = get_callee_fndecl (node);
904 if (! t)
905 break;
906
907
908 /* We cannot inline functions that call setjmp. */
909 if (setjmp_call_p (t))
910 {
911 inline_forbidden_reason = "%Hfunction '%F' can never be inlined"
912 " because it uses setjmp";
913 return node;
914 }
915
916 switch (DECL_FUNCTION_CODE (t))
917 {
918 /* We cannot inline functions that take a variable number of
919 arguments. */
920 case BUILT_IN_VA_START:
921 case BUILT_IN_STDARG_START:
922 {
923 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
924 "because it uses variable argument lists";
925 return node;
926 }
927 case BUILT_IN_LONGJMP:
928 {
929 /* We can't inline functions that call __builtin_longjmp at all.
930 The non-local goto machinery really requires the destination
931 be in a different function. If we allow the function calling
932 __builtin_longjmp to be inlined into the function calling
933 __builtin_setjmp, Things will Go Awry. */
934 /* ??? Need front end help to identify "regular" non-local goto. */
935 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
936 {
937 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
938 "because it uses setjmp-longjmp "
939 "exception handling";
940 return node;
941 }
942 }
943
944 default:
945 break;
946 }
947 break;
948
949 #ifndef INLINER_FOR_JAVA
950 case DECL_STMT:
951 /* We cannot inline functions that contain other functions. */
952 if (TREE_CODE (TREE_OPERAND (node, 0)) == FUNCTION_DECL
953 && DECL_INITIAL (TREE_OPERAND (node, 0)))
954 {
955 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
956 "because it contains a nested function";
957 return node;
958 }
959 break;
960
961 case GOTO_STMT:
962 case GOTO_EXPR:
963 t = TREE_OPERAND (node, 0);
964
965 /* We will not inline a function which uses computed goto. The
966 addresses of its local labels, which may be tucked into
967 global storage, are of course not constant across
968 instantiations, which causes unexpected behavior. */
969 if (TREE_CODE (t) != LABEL_DECL)
970 {
971 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
972 "because it contains a computed goto";
973 return node;
974 }
975
976 /* We cannot inline a nested function that jumps to a nonlocal
977 label. */
978 if (TREE_CODE (t) == LABEL_DECL && DECL_CONTEXT (t) != fn)
979 {
980 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
981 "because it contains a nonlocal goto";
982 return node;
983 }
984
985 break;
986
987 case RECORD_TYPE:
988 case UNION_TYPE:
989 /* We cannot inline a function of the form
990
991 void F (int i) { struct S { int ar[i]; } s; }
992
993 Attempting to do so produces a catch-22.
994 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
995 UNION_TYPE nodes, then it goes into infinite recursion on a
996 structure containing a pointer to its own type. If it doesn't,
997 then the type node for S doesn't get adjusted properly when
998 F is inlined, and we abort in find_function_data. */
999 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1000 if (variably_modified_type_p (TREE_TYPE (t)))
1001 {
1002 inline_forbidden_reason = "%Hfunction '%F' can never be inlined "
1003 "because it uses variable sized variables";
1004 return node;
1005 }
1006 #endif
1007 default:
1008 break;
1009 }
1010
1011 return NULL_TREE;
1012 }
1013
1014 /* Return a subexpression of FNDECL that prevents it from being inlined, if any. */
1015 static tree
1016 inline_forbidden_p (tree fndecl)
1017 {
1018 location_t saved_loc = input_location;
1019 tree ret = walk_tree_without_duplicates
1020 (&DECL_SAVED_TREE (fndecl), inline_forbidden_p_1, fndecl);
1021 input_location = saved_loc;
1022 return ret;
1023 }
1024
1025 /* Returns nonzero if FN is a function that does not have any
1026 fundamental inline blocking properties. */
1027
1028 static bool
1029 inlinable_function_p (tree fn)
1030 {
1031 bool inlinable = true;
1032
1033 /* If we've already decided this function shouldn't be inlined,
1034 there's no need to check again. */
1035 if (DECL_UNINLINABLE (fn))
1036 return false;
1037
1038 /* See if there is any language-specific reason it cannot be
1039 inlined. (It is important that this hook be called early because
1040 in C++ it may result in template instantiation.)
1041 If the function is not inlinable for language-specific reasons,
1042 it is left up to the langhook to explain why. */
1043 inlinable = !(*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn);
1044
1045 /* If we don't have the function body available, we can't inline it.
1046 However, this should not be recorded since we also get here for
1047 forward declared inline functions. Therefore, return at once. */
1048 if (!DECL_SAVED_TREE (fn))
1049 return false;
1050
1051 /* If we're not inlining at all, then we cannot inline this function. */
1052 else if (!flag_inline_trees)
1053 inlinable = false;
1054
1055 /* Only try to inline functions if DECL_INLINE is set. This should be
1056 true for all functions declared `inline', and for all other functions
1057 as well with -finline-functions.
1058
1059 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1060 it's the front-end that must set DECL_INLINE in this case, because
1061 dwarf2out loses if a function that does not have DECL_INLINE set is
1062 inlined anyway. That is why we have both DECL_INLINE and
1063 DECL_DECLARED_INLINE_P. */
1064 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1065 here should be redundant. */
1066 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1067 inlinable = false;
1068
1069 #ifdef INLINER_FOR_JAVA
1070 /* Synchronized methods can't be inlined. This is a bug. */
1071 else if (METHOD_SYNCHRONIZED (fn))
1072 inlinable = false;
1073 #endif /* INLINER_FOR_JAVA */
1074
1075 else if (inline_forbidden_p (fn))
1076 {
1077 /* See if we should warn about uninlinable functions. Previously,
1078 some of these warnings would be issued while trying to expand
1079 the function inline, but that would cause multiple warnings
1080 about functions that would for example call alloca. But since
1081 this is a property of the function, just one warning is enough.
1082 As a bonus we can now give more details about the reason why a
1083 function is not inlinable.
1084 We only warn for functions declared `inline' by the user. */
1085 bool do_warning = (warn_inline
1086 && DECL_INLINE (fn)
1087 && DECL_DECLARED_INLINE_P (fn)
1088 && !DECL_IN_SYSTEM_HEADER (fn));
1089
1090 if (do_warning)
1091 warning (inline_forbidden_reason,
1092 &DECL_SOURCE_LOCATION (fn), fn);
1093
1094 inlinable = false;
1095 }
1096
1097 /* Squirrel away the result so that we don't have to check again. */
1098 DECL_UNINLINABLE (fn) = !inlinable;
1099
1100 return inlinable;
1101 }
1102
1103 /* We can't inline functions that are too big. Only allow a single
1104 function to be of MAX_INLINE_INSNS_SINGLE size. Make special
1105 allowance for extern inline functions, though.
1106
1107 Return nonzero if the function FN can be inlined into the inlining
1108 context ID. */
1109
1110 static int
1111 limits_allow_inlining (tree fn, inline_data *id)
1112 {
1113 int estimated_insns = 0;
1114 size_t i;
1115
1116 /* Don't even bother if the function is not inlinable. */
1117 if (!inlinable_function_p (fn))
1118 return 0;
1119
1120 /* Investigate the size of the function. Return at once
1121 if the function body size is too large. */
1122 if (!(*lang_hooks.tree_inlining.disregard_inline_limits) (fn))
1123 {
1124 int currfn_max_inline_insns;
1125
1126 /* If we haven't already done so, get an estimate of the number of
1127 instructions that will be produced when expanding this function. */
1128 if (!DECL_ESTIMATED_INSNS (fn))
1129 DECL_ESTIMATED_INSNS (fn)
1130 = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
1131 estimated_insns = DECL_ESTIMATED_INSNS (fn);
1132
1133 /* We may be here either because fn is declared inline or because
1134 we use -finline-functions. For the second case, we are more
1135 restrictive.
1136
1137 FIXME: -finline-functions should imply -funit-at-a-time, it's
1138 about equally expensive but unit-at-a-time produces
1139 better code. */
1140 currfn_max_inline_insns = DECL_DECLARED_INLINE_P (fn) ?
1141 MAX_INLINE_INSNS_SINGLE : MAX_INLINE_INSNS_AUTO;
1142
1143 /* If the function is too big to be inlined, adieu. */
1144 if (estimated_insns > currfn_max_inline_insns)
1145 return 0;
1146
1147 /* We now know that we don't disregard the inlining limits and that
1148 we basically should be able to inline this function.
1149 We always allow inlining functions if we estimate that they are
1150 smaller than MIN_INLINE_INSNS. Otherwise, investigate further. */
1151 if (estimated_insns > MIN_INLINE_INSNS)
1152 {
1153 int sum_insns = (id ? id->inlined_insns : 0) + estimated_insns;
1154
1155 /* In the extreme case that we have exceeded the recursive inlining
1156 limit by a huge factor (128), we just say no.
1157
1158 FIXME: Should not happen in real life, but people have reported
1159 that it actually does!? */
1160 if (sum_insns > MAX_INLINE_INSNS * 128)
1161 return 0;
1162
1163 /* If we did not hit the extreme limit, we use a linear function
1164 with slope -1/MAX_INLINE_SLOPE to progressively decrease the
1165 allowable size. */
1166 else if (sum_insns > MAX_INLINE_INSNS)
1167 {
1168 if (estimated_insns > currfn_max_inline_insns
1169 - (sum_insns - MAX_INLINE_INSNS) / MAX_INLINE_SLOPE)
1170 return 0;
1171 }
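/* Worked example with made-up values (the real defaults live in
   params.def): with MAX_INLINE_INSNS == 600, MAX_INLINE_SLOPE == 32
   and id->inlined_insns == 700, a candidate estimated at 150 insns
   gives sum_insns == 850, so the allowance shrinks by
   (850 - 600) / 32 == 7; the candidate is inlined only if
   150 <= currfn_max_inline_insns - 7. */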
1172 }
1173 }
1174
1175 /* Don't allow recursive inlining. */
1176 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
1177 if (VARRAY_TREE (id->fns, i) == fn)
1178 return 0;
1179
1180 if (DECL_INLINED_FNS (fn))
1181 {
1182 int j;
1183 tree inlined_fns = DECL_INLINED_FNS (fn);
1184
1185 for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
1186 if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
1187 return 0;
1188 }
1189
1190 /* Go ahead, this function can be inlined. */
1191 return 1;
1192 }
1193
1194 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1195
1196 static tree
1197 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1198 {
1199 inline_data *id;
1200 tree t;
1201 tree expr;
1202 tree stmt;
1203 #ifndef INLINER_FOR_JAVA
1204 tree chain;
1205 tree scope_stmt;
1206 tree use_stmt;
1207 #else /* INLINER_FOR_JAVA */
1208 tree retvar;
1209 #endif /* INLINER_FOR_JAVA */
1210 tree fn;
1211 tree arg_inits;
1212 tree *inlined_body;
1213 splay_tree st;
1214 tree args;
1215 tree return_slot_addr;
1216
1217 /* See what we've got. */
1218 id = (inline_data *) data;
1219 t = *tp;
1220
1221 /* Recurse, but letting recursive invocations know that we are
1222 inside the body of a TARGET_EXPR. */
1223 if (TREE_CODE (*tp) == TARGET_EXPR)
1224 {
1225 #ifndef INLINER_FOR_JAVA
1226 int i, len = first_rtl_op (TARGET_EXPR);
1227
1228 /* We're walking our own subtrees. */
1229 *walk_subtrees = 0;
1230
1231 /* Actually walk over them. This loop is the body of
1232 walk_tree, omitting the case where the TARGET_EXPR
1233 itself is handled. */
1234 for (i = 0; i < len; ++i)
1235 {
1236 if (i == 2)
1237 ++id->in_target_cleanup_p;
1238 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1239 id->tree_pruner);
1240 if (i == 2)
1241 --id->in_target_cleanup_p;
1242 }
1243
1244 return NULL_TREE;
1245 #else /* INLINER_FOR_JAVA */
1246 abort ();
1247 #endif /* INLINER_FOR_JAVA */
1248 }
1249 else if (TREE_CODE (t) == EXPR_WITH_FILE_LOCATION)
1250 {
1251 /* We're walking the subtree directly. */
1252 *walk_subtrees = 0;
1253 /* Update the source position. */
1254 push_srcloc (EXPR_WFL_FILENAME (t), EXPR_WFL_LINENO (t));
1255 walk_tree (&EXPR_WFL_NODE (t), expand_call_inline, data,
1256 id->tree_pruner);
1257 /* Restore the original source position. */
1258 pop_srcloc ();
1259
1260 return NULL_TREE;
1261 }
1262
1263 if (TYPE_P (t))
1264 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1265 them should not be expanded. This can happen if the type is a
1266 dynamic array type, for example. */
1267 *walk_subtrees = 0;
1268
1269 /* From here on, we're only interested in CALL_EXPRs. */
1270 if (TREE_CODE (t) != CALL_EXPR)
1271 return NULL_TREE;
1272
1273 /* First, see if we can figure out what function is being called.
1274 If we cannot, then there is no hope of inlining the function. */
1275 fn = get_callee_fndecl (t);
1276 if (!fn)
1277 return NULL_TREE;
1278
1279 /* Turn forward declarations into real ones. */
1280 if (flag_unit_at_a_time)
1281 fn = cgraph_node (fn)->decl;
1282
1283 /* If fn is a declaration of a function in a nested scope that was
1284 globally declared inline, we don't set its DECL_INITIAL.
1285 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1286 C++ front-end uses it for cdtors to refer to their internal
1287 declarations, that are not real functions. Fortunately those
1288 don't have trees to be saved, so we can tell by checking their
1289 DECL_SAVED_TREE. */
1290 if (! DECL_INITIAL (fn)
1291 && DECL_ABSTRACT_ORIGIN (fn)
1292 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1293 fn = DECL_ABSTRACT_ORIGIN (fn);
1294
1295 /* Don't try to inline functions that are not well-suited to
1296 inlining. */
1297 if ((flag_unit_at_a_time
1298 && (!DECL_SAVED_TREE (fn) || !cgraph_inline_p (id->current_decl, fn)))
1299 || (!flag_unit_at_a_time && !limits_allow_inlining (fn, id)))
1300 {
1301 if (warn_inline && DECL_INLINE (fn) && DECL_DECLARED_INLINE_P (fn)
1302 && !DECL_IN_SYSTEM_HEADER (fn))
1303 {
1304 warning ("%Hinlining failed in call to '%F'",
1305 &DECL_SOURCE_LOCATION (fn), fn);
1306 warning ("called from here");
1307 }
1308 return NULL_TREE;
1309 }
1310
1311 if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
1312 return NULL_TREE;
1313
1314 /* Set the current filename and line number to the function we are
1315 inlining so that when we create new _STMT nodes here they get
1316 line numbers corresponding to the function we are calling. We
1317 wrap the whole inlined body in an EXPR_WITH_FILE_LOCATION as well
1318 because individual statements don't record the filename. */
1319 push_srcloc (DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn));
1320
1321 #ifndef INLINER_FOR_JAVA
1322 /* Build a statement-expression containing code to initialize the
1323 arguments, the actual inline expansion of the body, and a label
1324 for the return statements within the function to jump to. The
1325 type of the statement expression is the return type of the
1326 function call. */
1327 expr = build1 (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), make_node (COMPOUND_STMT));
1328 /* There is no scope associated with the statement-expression. */
1329 STMT_EXPR_NO_SCOPE (expr) = 1;
1330 stmt = STMT_EXPR_STMT (expr);
1331 #else /* INLINER_FOR_JAVA */
1332 /* Build a block containing code to initialize the arguments, the
1333 actual inline expansion of the body, and a label for the return
1334 statements within the function to jump to. The type of the
1335 statement expression is the return type of the function call. */
1336 stmt = NULL;
1337 expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)), stmt);
1338 #endif /* INLINER_FOR_JAVA */
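/* Roughly, on the C/C++ path the CALL_EXPR "f (a, b)" is replaced by a
   statement-expression of this shape (names are illustrative):

     ({ <parameter decls and initializations>;
        <copy of f's body, with each return rewritten as
           <retval.3> = ...; goto <ret_label>;>
        <ret_label>:;
        <retval.3>; })

   and the whole thing is then wrapped in an EXPR_WITH_FILE_LOCATION
   pointing at f's definition. */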
1339
1340 /* Local declarations will be replaced by their equivalents in this
1341 map. */
1342 st = id->decl_map;
1343 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1344 NULL, NULL);
1345
1346 /* Initialize the parameters. */
1347 args = TREE_OPERAND (t, 1);
1348 return_slot_addr = NULL_TREE;
1349 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1350 {
1351 return_slot_addr = TREE_VALUE (args);
1352 args = TREE_CHAIN (args);
1353 }
1354
1355 #ifndef INLINER_FOR_JAVA
1356 arg_inits = initialize_inlined_parameters (id, args, fn);
1357 /* Expand any inlined calls in the initializers. Do this before we
1358 push FN on the stack of functions we are inlining; we want to
1359 inline calls to FN that appear in the initializers for the
1360 parameters. */
1361 expand_calls_inline (&arg_inits, id);
1362 /* And add them to the tree. */
1363 COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), arg_inits);
1364 #else /* INLINER_FOR_JAVA */
1365 arg_inits = initialize_inlined_parameters (id, args, fn, expr);
1366 if (arg_inits)
1367 {
1368 /* Expand any inlined calls in the initializers. Do this before we
1369 push FN on the stack of functions we are inlining; we want to
1370 inline calls to FN that appear in the initializers for the
1371 parameters. */
1372 expand_calls_inline (&arg_inits, id);
1373
1374 /* And add them to the tree. */
1375 BLOCK_EXPR_BODY (expr) = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1376 TREE_TYPE (arg_inits),
1377 arg_inits);
1378 }
1379 #endif /* INLINER_FOR_JAVA */
1380
1381 /* Record the function we are about to inline so that we can avoid
1382 recursing into it. */
1383 VARRAY_PUSH_TREE (id->fns, fn);
1384
1385 /* Record the function we are about to inline if optimize_function
1386 has not been called on it yet and we don't have it in the list. */
1387 if (! DECL_INLINED_FNS (fn))
1388 {
1389 int i;
1390
1391 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
1392 if (VARRAY_TREE (id->inlined_fns, i) == fn)
1393 break;
1394 if (i < 0)
1395 VARRAY_PUSH_TREE (id->inlined_fns, fn);
1396 }
1397
1398 /* Return statements in the function body will be replaced by jumps
1399 to the RET_LABEL. */
1400 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1401 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1402
1403 if (! DECL_INITIAL (fn)
1404 || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
1405 abort ();
1406
1407 #ifndef INLINER_FOR_JAVA
1408 /* Create a block to put the parameters in. We have to do this
1409 after the parameters have been remapped because remapping
1410 parameters is different from remapping ordinary variables. */
1411 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
1412 SCOPE_BEGIN_P (scope_stmt) = 1;
1413 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
1414 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
1415 TREE_CHAIN (scope_stmt) = COMPOUND_BODY (stmt);
1416 COMPOUND_BODY (stmt) = scope_stmt;
1417
1418 /* Tell the debugging backends that this block represents the
1419 outermost scope of the inlined function. */
1420 if (SCOPE_STMT_BLOCK (scope_stmt))
1421 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
1422
1423 /* Declare the return variable for the function. */
1424 COMPOUND_BODY (stmt)
1425 = chainon (COMPOUND_BODY (stmt),
1426 declare_return_variable (id, return_slot_addr, &use_stmt));
1427 #else /* INLINER_FOR_JAVA */
1428 {
1429 /* Declare the return variable for the function. */
1430 tree decl = declare_return_variable (id, return_slot_addr, &retvar);
1431 if (retvar)
1432 {
1433 tree *next = &BLOCK_VARS (expr);
1434 while (*next)
1435 next = &TREE_CHAIN (*next);
1436 *next = decl;
1437 }
1438 }
1439 #endif /* INLINER_FOR_JAVA */
1440
1441 /* After we've initialized the parameters, we insert the body of the
1442 function itself. */
1443 #ifndef INLINER_FOR_JAVA
1444 inlined_body = &COMPOUND_BODY (stmt);
1445 while (*inlined_body)
1446 inlined_body = &TREE_CHAIN (*inlined_body);
1447 *inlined_body = copy_body (id);
1448 #else /* INLINER_FOR_JAVA */
1449 {
1450 tree new_body;
1451 java_inlining_map_static_initializers (fn, id->decl_map);
1452 new_body = copy_body (id);
1453 TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
1454 BLOCK_EXPR_BODY (expr)
1455 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1456 TREE_TYPE (new_body), new_body);
1457 inlined_body = &BLOCK_EXPR_BODY (expr);
1458 }
1459 #endif /* INLINER_FOR_JAVA */
1460
1461 /* After the body of the function comes the RET_LABEL. This must come
1462 before we evaluate the returned value below, because that evaluation
1463 may cause RTL to be generated. */
1464 #ifndef INLINER_FOR_JAVA
1465 COMPOUND_BODY (stmt)
1466 = chainon (COMPOUND_BODY (stmt),
1467 build_stmt (LABEL_STMT, id->ret_label));
1468 #else /* INLINER_FOR_JAVA */
1469 {
1470 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1471 BLOCK_EXPR_BODY (expr)
1472 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr), void_type_node, label);
1473 TREE_SIDE_EFFECTS (label) = TREE_SIDE_EFFECTS (t);
1474 }
1475 #endif /* INLINER_FOR_JAVA */
1476
1477 /* Finally, mention the returned value so that the value of the
1478 statement-expression is the returned value of the function. */
1479 #ifndef INLINER_FOR_JAVA
1480 COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), use_stmt);
1481
1482 /* Close the block for the parameters. */
1483 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
1484 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
1485 remap_block (scope_stmt, NULL_TREE, id);
1486 COMPOUND_BODY (stmt)
1487 = chainon (COMPOUND_BODY (stmt), scope_stmt);
1488 #else /* INLINER_FOR_JAVA */
1489 if (retvar)
1490 {
1491 /* Mention the retvar. If the return type of the function was
1492 promoted, convert it back to the expected type. */
1493 if (TREE_TYPE (TREE_TYPE (fn)) != TREE_TYPE (retvar))
1494 retvar = build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)), retvar);
1495 BLOCK_EXPR_BODY (expr)
1496 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1497 TREE_TYPE (retvar), retvar);
1498 }
1499
1500 java_inlining_merge_static_initializers (fn, id->decl_map);
1501 #endif /* INLINER_FOR_JAVA */
1502
1503 /* Clean up. */
1504 splay_tree_delete (id->decl_map);
1505 id->decl_map = st;
1506
1507 /* The new expression has side-effects if the old one did. */
1508 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
1509
1510 /* Replace the call by the inlined body. Wrap it in an
1511 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
1512 pointing to the right place. */
1513 #ifndef INLINER_FOR_JAVA
1514 chain = TREE_CHAIN (*tp);
1515 #endif /* INLINER_FOR_JAVA */
1516 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
1517 /*col=*/0);
1518 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
1519 #ifndef INLINER_FOR_JAVA
1520 TREE_CHAIN (*tp) = chain;
1521 #endif /* not INLINER_FOR_JAVA */
1522 pop_srcloc ();
1523
1524 /* If the value of the new expression is ignored, that's OK. We
1525 don't warn about this for CALL_EXPRs, so we shouldn't warn about
1526 the equivalent inlined version either. */
1527 TREE_USED (*tp) = 1;
1528
1529 /* Our function now has more statements than it did before. */
1530 DECL_ESTIMATED_INSNS (VARRAY_TREE (id->fns, 0)) += DECL_ESTIMATED_INSNS (fn);
1531 /* For accounting, subtract one for the saved call/ret. */
1532 id->inlined_insns += DECL_ESTIMATED_INSNS (fn) - 1;
1533
1534 /* Update callgraph if needed. */
1535 if (id->decl && flag_unit_at_a_time)
1536 {
1537 cgraph_remove_call (id->decl, fn);
1538 cgraph_create_edges (id->decl, *inlined_body);
1539 }
1540
1541 /* Recurse into the body of the just inlined function. */
1542 {
1543 tree old_decl = id->current_decl;
1544 id->current_decl = fn;
1545 expand_calls_inline (inlined_body, id);
1546 id->current_decl = old_decl;
1547 }
1548 VARRAY_POP (id->fns);
1549
1550 /* If we've returned to the top level, clear out the record of how
1551 much inlining has been done. */
1552 if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
1553 id->inlined_insns = 0;
1554
1555 /* Don't walk into subtrees. We've already handled them above. */
1556 *walk_subtrees = 0;
1557
1558 (*lang_hooks.tree_inlining.end_inlining) (fn);
1559
1560 /* Keep iterating. */
1561 return NULL_TREE;
1562 }
1563 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
1564 expansions as appropriate. */
1565
1566 static void
1567 expand_calls_inline (tree *tp, inline_data *id)
1568 {
1569 /* Search through *TP, replacing all calls to inline functions by
1570 appropriate equivalents. Use walk_tree in no-duplicates mode
1571 to avoid exponential time complexity. (We can't just use
1572 walk_tree_without_duplicates, because of the special TARGET_EXPR
1573 handling in expand_calls.) The hash table is set up in
1574 optimize_function. */
1575 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
1576 }
1577
1578 /* Expand calls to inline functions in the body of FN. */
1579
1580 void
1581 optimize_inline_calls (tree fn)
1582 {
1583 inline_data id;
1584 tree prev_fn;
1585
1586 /* Clear out ID. */
1587 memset (&id, 0, sizeof (id));
1588
1589 id.decl = fn;
1590 id.current_decl = fn;
1591 /* Don't allow recursion into FN. */
1592 VARRAY_TREE_INIT (id.fns, 32, "fns");
1593 VARRAY_PUSH_TREE (id.fns, fn);
1594 if (!DECL_ESTIMATED_INSNS (fn))
1595 DECL_ESTIMATED_INSNS (fn)
1596 = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
1597 /* Or any functions that aren't finished yet. */
1598 prev_fn = NULL_TREE;
1599 if (current_function_decl)
1600 {
1601 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1602 prev_fn = current_function_decl;
1603 }
1604
1605 prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
1606 (&id.fns, prev_fn));
1607
1608 /* Create the list of functions this call will inline. */
1609 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1610
1611 /* Keep track of the low-water mark, i.e., the point where the first
1612 real inlining is represented in ID.FNS. */
1613 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1614
1615 /* Replace all calls to inline functions with the bodies of those
1616 functions. */
1617 id.tree_pruner = htab_create (37, htab_hash_pointer,
1618 htab_eq_pointer, NULL);
1619 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1620
1621 /* Clean up. */
1622 htab_delete (id.tree_pruner);
1623 if (DECL_LANG_SPECIFIC (fn))
1624 {
1625 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1626
1627 if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
1628 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1629 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1630 DECL_INLINED_FNS (fn) = ifn;
1631 }
1632 }
1633
1634 /* FN is a function that has a complete body, and CLONE is a function
1635 whose body is to be set to a copy of FN, mapping argument
1636 declarations according to the ARG_MAP splay_tree. */
1637
1638 void
1639 clone_body (tree clone, tree fn, void *arg_map)
1640 {
1641 inline_data id;
1642
1643 /* Clone the body, as if we were making an inline call. But, remap
1644 the parameters in the callee to the parameters of caller. If
1645 there's an in-charge parameter, map it to an appropriate
1646 constant. */
1647 memset (&id, 0, sizeof (id));
1648 VARRAY_TREE_INIT (id.fns, 2, "fns");
1649 VARRAY_PUSH_TREE (id.fns, clone);
1650 VARRAY_PUSH_TREE (id.fns, fn);
1651 id.decl_map = (splay_tree)arg_map;
1652
1653 /* Cloning is treated slightly differently from inlining. Set
1654 CLONING_P so that it's clear which operation we're performing. */
1655 id.cloning_p = true;
1656
1657 /* Actually copy the body. */
1658 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1659 }
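/* Example (illustrative sketch, not part of this file): a caller builds
   ARG_MAP as a splay tree keyed on FN's PARM_DECLs whose values are the
   corresponding PARM_DECLs of CLONE, and then hands it to clone_body.
   The variable names below are hypothetical.

      splay_tree arg_map
        = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
      tree fn_parm = DECL_ARGUMENTS (fn);
      tree clone_parm = DECL_ARGUMENTS (clone);
      for (; fn_parm && clone_parm;
           fn_parm = TREE_CHAIN (fn_parm), clone_parm = TREE_CHAIN (clone_parm))
        splay_tree_insert (arg_map, (splay_tree_key) fn_parm,
                           (splay_tree_value) clone_parm);
      clone_body (clone, fn, arg_map);

   As the comment above notes, an in-charge parameter can be mapped to a
   constant through the same splay tree.  */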
1660
1661 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1662 FUNC is called with the DATA and the address of each sub-tree. If
1663 FUNC returns a non-NULL value, the traversal is aborted, and the
1664 value returned by FUNC is returned. If HTAB is non-NULL it is used
1665 to record the nodes visited, and to avoid visiting a node more than
1666 once. */
1667
1668 tree
1669 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
1670 {
1671 htab_t htab = (htab_t) htab_;
1672 enum tree_code code;
1673 int walk_subtrees;
1674 tree result;
1675
1676 #define WALK_SUBTREE(NODE) \
1677 do \
1678 { \
1679 result = walk_tree (&(NODE), func, data, htab); \
1680 if (result) \
1681 return result; \
1682 } \
1683 while (0)
1684
1685 #define WALK_SUBTREE_TAIL(NODE) \
1686 do \
1687 { \
1688 tp = & (NODE); \
1689 goto tail_recurse; \
1690 } \
1691 while (0)
1692
1693 tail_recurse:
1694 /* Skip empty subtrees. */
1695 if (!*tp)
1696 return NULL_TREE;
1697
1698 if (htab)
1699 {
1700 void **slot;
1701
1702 /* Don't walk the same tree twice, if the user has requested
1703 that we avoid doing so. */
1704 slot = htab_find_slot (htab, *tp, INSERT);
1705 if (*slot)
1706 return NULL_TREE;
1707 *slot = *tp;
1708 }
1709
1710 /* Call the function. */
1711 walk_subtrees = 1;
1712 result = (*func) (tp, &walk_subtrees, data);
1713
1714 /* If we found something, return it. */
1715 if (result)
1716 return result;
1717
1718 code = TREE_CODE (*tp);
1719
1720 #ifndef INLINER_FOR_JAVA
1721 /* Even if we didn't, FUNC may have decided that there was nothing
1722 interesting below this point in the tree. */
1723 if (!walk_subtrees)
1724 {
1725 if (STATEMENT_CODE_P (code) || code == TREE_LIST
1726 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1727 /* But we still need to check our siblings. */
1728 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1729 else
1730 return NULL_TREE;
1731 }
1732
1733 /* Handle common cases up front. */
1734 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
1735 #else /* INLINER_FOR_JAVA */
1736 if (code != EXIT_BLOCK_EXPR
1737 && code != SAVE_EXPR
1738 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
1739 #endif /* INLINER_FOR_JAVA */
1740 {
1741 int i, len;
1742
1743 #ifndef INLINER_FOR_JAVA
1744 /* Set input_line here so we get the right instantiation context
1745 if we call instantiate_decl from inlinable_function_p. */
1746 if (STATEMENT_CODE_P (code) && !STMT_LINENO_FOR_FN_P (*tp))
1747 input_line = STMT_LINENO (*tp);
1748 #endif /* not INLINER_FOR_JAVA */
1749
1750 /* Walk over all the sub-trees of this operand. */
1751 len = first_rtl_op (code);
1752 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
1753 But, we only want to walk once. */
1754 if (code == TARGET_EXPR
1755 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
1756 --len;
1757 /* Go through the subtrees. We need to do this in forward order so
1758 that the scope of a FOR_EXPR is handled properly. */
1759 for (i = 0; i < len; ++i)
1760 WALK_SUBTREE (TREE_OPERAND (*tp, i));
1761
1762 #ifndef INLINER_FOR_JAVA
1763 /* For statements, we also walk the chain so that we cover the
1764 entire statement tree. */
1765 if (STATEMENT_CODE_P (code))
1766 {
1767 if (code == DECL_STMT
1768 && DECL_STMT_DECL (*tp)
1769 && DECL_P (DECL_STMT_DECL (*tp)))
1770 {
1771 /* Walk the DECL_INITIAL, DECL_SIZE and DECL_SIZE_UNIT. We don't want to walk
1772 into declarations that are just mentioned, rather than
1773 declared; they don't really belong to this part of the tree.
1774 And, we can see cycles: the initializer for a declaration can
1775 refer to the declaration itself. */
1776 WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
1777 WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
1778 WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp)));
1779 }
1780
1781 /* This can be tail-recursion optimized if we write it this way. */
1782 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1783 }
1784
1785 #endif /* not INLINER_FOR_JAVA */
1786 /* We didn't find what we were looking for. */
1787 return NULL_TREE;
1788 }
1789 else if (TREE_CODE_CLASS (code) == 'd')
1790 {
1791 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1792 }
1793 else if (TREE_CODE_CLASS (code) == 't')
1794 {
1795 WALK_SUBTREE (TYPE_SIZE (*tp));
1796 WALK_SUBTREE (TYPE_SIZE_UNIT (*tp));
1797 /* Also examine various special fields, below. */
1798 }
1799
1800 result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
1801 data, htab);
1802 if (result || ! walk_subtrees)
1803 return result;
1804
1805 /* Not one of the easy cases. We must explicitly go through the
1806 children. */
1807 switch (code)
1808 {
1809 case ERROR_MARK:
1810 case IDENTIFIER_NODE:
1811 case INTEGER_CST:
1812 case REAL_CST:
1813 case VECTOR_CST:
1814 case STRING_CST:
1815 case REAL_TYPE:
1816 case COMPLEX_TYPE:
1817 case VECTOR_TYPE:
1818 case VOID_TYPE:
1819 case BOOLEAN_TYPE:
1820 case UNION_TYPE:
1821 case ENUMERAL_TYPE:
1822 case BLOCK:
1823 case RECORD_TYPE:
1824 case CHAR_TYPE:
1825 /* None of these have subtrees other than those already walked
1826 above. */
1827 break;
1828
1829 case POINTER_TYPE:
1830 case REFERENCE_TYPE:
1831 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1832 break;
1833
1834 case TREE_LIST:
1835 WALK_SUBTREE (TREE_VALUE (*tp));
1836 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1837 break;
1838
1839 case TREE_VEC:
1840 {
1841 int len = TREE_VEC_LENGTH (*tp);
1842
1843 if (len == 0)
1844 break;
1845
1846 /* Walk all elements but the first. */
1847 while (--len)
1848 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
1849
1850 /* Now walk the first one as a tail call. */
1851 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
1852 }
1853
1854 case COMPLEX_CST:
1855 WALK_SUBTREE (TREE_REALPART (*tp));
1856 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
1857
1858 case CONSTRUCTOR:
1859 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
1860
1861 case METHOD_TYPE:
1862 WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
1863 /* Fall through. */
1864
1865 case FUNCTION_TYPE:
1866 WALK_SUBTREE (TREE_TYPE (*tp));
1867 {
1868 tree arg = TYPE_ARG_TYPES (*tp);
1869
1870 /* We never want to walk into default arguments. */
1871 for (; arg; arg = TREE_CHAIN (arg))
1872 WALK_SUBTREE (TREE_VALUE (arg));
1873 }
1874 break;
1875
1876 case ARRAY_TYPE:
1877 WALK_SUBTREE (TREE_TYPE (*tp));
1878 WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));
1879
1880 case INTEGER_TYPE:
1881 WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
1882 WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));
1883
1884 case OFFSET_TYPE:
1885 WALK_SUBTREE (TREE_TYPE (*tp));
1886 WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));
1887
1888 #ifdef INLINER_FOR_JAVA
1889 case EXIT_BLOCK_EXPR:
1890 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
1891
1892 case SAVE_EXPR:
1893 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
1894 #endif /* INLINER_FOR_JAVA */
1895
1896 default:
1897 abort ();
1898 }
1899
1900 /* We didn't find what we were looking for. */
1901 return NULL_TREE;
1902
1903 #undef WALK_SUBTREE
1904 #undef WALK_SUBTREE_TAIL
1905 }
1906
1907 /* Like walk_tree, but does not walk duplicate nodes more than
1908 once. */
1909
1910 tree
1911 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
1912 {
1913 tree result;
1914 htab_t htab;
1915
1916 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1917 result = walk_tree (tp, func, data, htab);
1918 htab_delete (htab);
1919 return result;
1920 }
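/* Example (illustrative sketch, not part of this file): a minimal
   walk_tree_fn callback that counts CALL_EXPR nodes in a function body.
   The names `count_calls_r', `n_calls' and `fndecl' are hypothetical.

      static tree
      count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                     void *data)
      {
        if (TREE_CODE (*tp) == CALL_EXPR)
          ++*(int *) data;
        return NULL_TREE;
      }

      int n_calls = 0;
      walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
                                    count_calls_r, &n_calls);

   Returning a non-NULL tree from the callback stops the walk and
   propagates that value back to the caller; clearing *walk_subtrees
   prevents descent into the current node's operands.  */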
1921
1922 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1923
1924 tree
1925 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1926 {
1927 enum tree_code code = TREE_CODE (*tp);
1928
1929 /* We make copies of most nodes. */
1930 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1931 || TREE_CODE_CLASS (code) == 'c'
1932 || code == TREE_LIST
1933 || code == TREE_VEC
1934 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1935 {
1936 /* Because the chain gets clobbered when we make a copy, we save it
1937 here. */
1938 tree chain = TREE_CHAIN (*tp);
1939
1940 /* Copy the node. */
1941 *tp = copy_node (*tp);
1942
1943 /* Now, restore the chain, if appropriate. That will cause
1944 walk_tree to walk into the chain as well. */
1945 if (code == PARM_DECL || code == TREE_LIST
1946 #ifndef INLINER_FOR_JAVA
1947 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
1948 || STATEMENT_CODE_P (code))
1949 TREE_CHAIN (*tp) = chain;
1950
1951 /* For now, we don't update BLOCKs when we make copies. So, we
1952 have to nullify all scope-statements. */
1953 if (TREE_CODE (*tp) == SCOPE_STMT)
1954 SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
1955 #else /* INLINER_FOR_JAVA */
1956 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1957 TREE_CHAIN (*tp) = chain;
1958 #endif /* INLINER_FOR_JAVA */
1959 }
1960 else if (TREE_CODE_CLASS (code) == 't' && !variably_modified_type_p (*tp))
1961 /* Types only need to be copied if they are variably modified. */
1962 *walk_subtrees = 0;
1963
1964 return NULL_TREE;
1965 }
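/* Example (illustrative sketch, not part of this file): copy_tree_r is
   meant to be used as a walk_tree callback to deep-copy an expression in
   place.  The variable name `expr' is hypothetical.

      tree expr = ...;
      walk_tree (&expr, copy_tree_r, NULL, NULL);

   After the walk, `expr' points at a fresh copy whose expression,
   constant, TREE_LIST and TREE_VEC nodes have been duplicated, while
   decls and non-variably-modified types remain shared with the
   original.  */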
1966
1967 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
1968 information indicating to what new SAVE_EXPR this one should be
1969 mapped, use that one. Otherwise, create a new node and enter it in
1970 ST. FN is the function into which the copy will be placed. */
1971
1972 void
1973 remap_save_expr (tree *tp, void *st_, tree fn, int *walk_subtrees)
1974 {
1975 splay_tree st = (splay_tree) st_;
1976 splay_tree_node n;
1977
1978 /* See if we already encountered this SAVE_EXPR. */
1979 n = splay_tree_lookup (st, (splay_tree_key) *tp);
1980
1981 /* If we didn't already remap this SAVE_EXPR, do so now. */
1982 if (!n)
1983 {
1984 tree t = copy_node (*tp);
1985
1986 /* The SAVE_EXPR is now part of the function into which we
1987 are inlining this body. */
1988 SAVE_EXPR_CONTEXT (t) = fn;
1989 /* And we haven't evaluated it yet. */
1990 SAVE_EXPR_RTL (t) = NULL_RTX;
1991 /* Remember this SAVE_EXPR. */
1992 n = splay_tree_insert (st,
1993 (splay_tree_key) *tp,
1994 (splay_tree_value) t);
1995 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
1996 splay_tree_insert (st, (splay_tree_key) t,
1997 (splay_tree_value) error_mark_node);
1998 }
1999 else
2000 /* We've already walked into this SAVE_EXPR, so we needn't do it
2001 again. */
2002 *walk_subtrees = 0;
2003
2004 /* Replace this SAVE_EXPR with the copy. */
2005 *tp = (tree) n->value;
2006 }
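/* Example (illustrative sketch, not part of this file): a tree-copying
   walk_tree callback, on reaching a SAVE_EXPR, hands it to
   remap_save_expr together with its decl map, so that every reference to
   the original SAVE_EXPR ends up pointing at the same copy.  The names
   `st' and `fn' stand for the caller's splay tree and target function.

      if (TREE_CODE (*tp) == SAVE_EXPR)
        remap_save_expr (tp, st, fn, walk_subtrees);

   This mirrors how the body-copying code earlier in this file keeps a
   SAVE_EXPR that is referenced several times mapped to a single new
   node.  */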
2007
2008 #ifdef INLINER_FOR_JAVA
2009 /* Add STMT to EXISTING if possible, otherwise create a new
2010 COMPOUND_EXPR and add STMT to it. */
2011
2012 static tree
2013 add_stmt_to_compound (tree existing, tree type, tree stmt)
2014 {
2015 if (!stmt)
2016 return existing;
2017 else if (existing)
2018 return build (COMPOUND_EXPR, type, existing, stmt);
2019 else
2020 return stmt;
2021 }
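/* Example (illustrative sketch, not part of this file): successive
   statements can be accumulated into a single COMPOUND_EXPR by threading
   the result back through EXISTING; the names below are hypothetical.

      tree body = NULL_TREE;
      body = add_stmt_to_compound (body, void_type_node, stmt1);
      body = add_stmt_to_compound (body, void_type_node, stmt2);

   The first call simply returns STMT1; the second wraps both statements
   in a COMPOUND_EXPR of the given type.  */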
2022
2023 #endif /* INLINER_FOR_JAVA */