gcc/tree-inline.c
1 /* Control and data flow functions for trees.
2 Copyright 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "splay-tree.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "intl.h"
42
43
44 /* This should eventually be generalized to other languages, but
45 this would require a shared function-as-trees infrastructure. */
46 #ifndef INLINER_FOR_JAVA
47 #include "c-common.h"
48 #else /* INLINER_FOR_JAVA */
49 #include "parse.h"
50 #include "java-tree.h"
51 #endif /* INLINER_FOR_JAVA */
52
53 /* 0 if we should not perform inlining.
54 1 if we should expand function calls inline at the tree level.
55 2 if we should consider *all* functions to be inline
56 candidates. */
57
58 int flag_inline_trees = 0;
59
60 /* To Do:
61
62 o In order to make inlining-on-trees work, we pessimized
63 function-local static constants. In particular, they are now
64 always output, even when not addressed. Fix this by treating
65 function-local static constants just like global static
66 constants; the back-end already knows not to output them if they
67 are not needed.
68
69 o Provide heuristics to clamp inlining of recursive template
70 calls? */
71
72 /* Data required for function inlining. */
73
74 typedef struct inline_data
75 {
76 /* A stack of the functions we are inlining. For example, if we are
77 compiling `f', which calls `g', which calls `h', and we are
78 inlining the body of `h', the stack will contain `h', followed
79 by `g', followed by `f'. The first few elements of the stack may
80 contain other functions that we know we should not recurse into,
81 even though they are not directly being inlined. */
82 varray_type fns;
83 /* The index of the first element of FNS that really represents an
84 inlined function. */
85 unsigned first_inlined_fn;
86 /* The label to jump to when a return statement is encountered. If
87 this value is NULL, then return statements will simply be
88 remapped as return statements, rather than as jumps. */
89 tree ret_label;
90 /* The map from local declarations in the inlined function to
91 equivalents in the function into which it is being inlined. */
92 splay_tree decl_map;
93 /* Nonzero if we are currently within the cleanup for a
94 TARGET_EXPR. */
95 int in_target_cleanup_p;
97 /* A list of the functions the current function has inlined. */
97 varray_type inlined_fns;
98 /* The approximate number of instructions we have inlined in the
99 current call stack. */
100 int inlined_insns;
101 /* We use the same mechanism to build clones that we do to perform
102 inlining. However, there are a few places where we need to
103 distinguish between those two situations. This flag is true if
104 we are cloning, rather than inlining. */
105 bool cloning_p;
106 /* Hash table used to prevent walk_tree from visiting the same node
107 umpteen million times. */
108 htab_t tree_pruner;
109 /* Decl of function we are inlining into. */
110 tree decl;
111 tree current_decl;
112 } inline_data;
113
114 /* Prototypes. */
115
116 static tree declare_return_variable (inline_data *, tree, tree *);
117 static tree copy_body_r (tree *, int *, void *);
118 static tree copy_body (inline_data *);
119 static tree expand_call_inline (tree *, int *, void *);
120 static void expand_calls_inline (tree *, inline_data *);
121 static bool inlinable_function_p (tree);
122 static tree remap_decl (tree, inline_data *);
123 static tree remap_type (tree, inline_data *);
124 #ifndef INLINER_FOR_JAVA
125 static tree initialize_inlined_parameters (inline_data *, tree, tree);
126 static void remap_block (tree, tree, inline_data *);
127 static void copy_scope_stmt (tree *, int *, inline_data *);
128 #else /* INLINER_FOR_JAVA */
129 static tree initialize_inlined_parameters (inline_data *, tree, tree, tree);
130 static void remap_block (tree *, tree, inline_data *);
131 static tree add_stmt_to_compound (tree, tree, tree);
132 #endif /* INLINER_FOR_JAVA */
133
134 /* Remap DECL during the copying of the BLOCK tree for the function. */
135
136 static tree
137 remap_decl (tree decl, inline_data *id)
138 {
139 splay_tree_node n;
140 tree fn;
141
142 /* We only remap local variables in the current function. */
143 fn = VARRAY_TOP_TREE (id->fns);
144 if (! (*lang_hooks.tree_inlining.auto_var_in_fn_p) (decl, fn))
145 return NULL_TREE;
146
147 /* See if we have remapped this declaration. */
148 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
149
150 /* If we didn't already have an equivalent for this declaration,
151 create one now. */
152 if (!n)
153 {
154 tree t;
155
156 /* Make a copy of the variable or label. */
157 t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
158
159 /* Remap types, if necessary. */
160 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
161 if (TREE_CODE (t) == TYPE_DECL)
162 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
163 else if (TREE_CODE (t) == PARM_DECL)
164 DECL_ARG_TYPE_AS_WRITTEN (t)
165 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
166
167 /* Remap sizes as necessary. */
168 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
169 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
170
171 #ifndef INLINER_FOR_JAVA
172 if (! DECL_NAME (t) && TREE_TYPE (t)
173 && (*lang_hooks.tree_inlining.anon_aggr_type_p) (TREE_TYPE (t)))
174 {
175 /* For a VAR_DECL of anonymous type, we must also copy the
176 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
177 tree members = NULL;
178 tree src;
179
180 for (src = DECL_ANON_UNION_ELEMS (t); src;
181 src = TREE_CHAIN (src))
182 {
183 tree member = remap_decl (TREE_VALUE (src), id);
184
185 if (TREE_PURPOSE (src))
186 abort ();
187 members = tree_cons (NULL, member, members);
188 }
189 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
190 }
191 #endif /* not INLINER_FOR_JAVA */
192
193 /* Remember it, so that if we encounter this local entity
194 again we can reuse this copy. */
195 n = splay_tree_insert (id->decl_map,
196 (splay_tree_key) decl,
197 (splay_tree_value) t);
198 }
199
200 return (tree) n->value;
201 }
202
203 static tree
204 remap_type (tree type, inline_data *id)
205 {
206 splay_tree_node node;
207 tree new, t;
208
209 if (type == NULL)
210 return type;
211
212 /* See if we have remapped this type. */
213 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
214 if (node)
215 return (tree) node->value;
216
217 /* The type only needs remapping if it's variably modified. */
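/* A purely illustrative case that does need remapping: inside
`void f (int n) { int a[n]; ... }' the type of `a' is variably
modified because its size refers to the PARM_DECL `n', so the type
must be copied and its size expressions rewritten in terms of the
replacement for `n' when the body is duplicated. */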
218 if (! variably_modified_type_p (type))
219 {
220 splay_tree_insert (id->decl_map, (splay_tree_key) type,
221 (splay_tree_value) type);
222 return type;
223 }
224
225 /* We do need a copy. Build and register it now. */
226 new = copy_node (type);
227 splay_tree_insert (id->decl_map, (splay_tree_key) type,
228 (splay_tree_value) new);
229
230 /* This is a new type, not a copy of an old type. Need to reassociate
231 variants. We can handle everything except the main variant lazily. */
232 t = TYPE_MAIN_VARIANT (type);
233 if (type != t)
234 {
235 t = remap_type (t, id);
236 TYPE_MAIN_VARIANT (new) = t;
237 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
238 TYPE_NEXT_VARIANT (t) = new;
239 }
240 else
241 {
242 TYPE_MAIN_VARIANT (new) = new;
243 TYPE_NEXT_VARIANT (new) = NULL;
244 }
245
246 /* Lazily create pointer and reference types. */
247 TYPE_POINTER_TO (new) = NULL;
248 TYPE_REFERENCE_TO (new) = NULL;
249
250 switch (TREE_CODE (new))
251 {
252 case INTEGER_TYPE:
253 case REAL_TYPE:
254 case ENUMERAL_TYPE:
255 case BOOLEAN_TYPE:
256 case CHAR_TYPE:
257 t = TYPE_MIN_VALUE (new);
258 if (t && TREE_CODE (t) != INTEGER_CST)
259 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
260 t = TYPE_MAX_VALUE (new);
261 if (t && TREE_CODE (t) != INTEGER_CST)
262 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
263 return new;
264
265 case POINTER_TYPE:
266 TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
267 if (TYPE_MODE (new) == ptr_mode)
268 TYPE_POINTER_TO (t) = new;
269 return new;
270
271 case REFERENCE_TYPE:
272 TREE_TYPE (new) = t = remap_type (TREE_TYPE (new), id);
273 if (TYPE_MODE (new) == ptr_mode)
274 TYPE_REFERENCE_TO (t) = new;
275 return new;
276
277 case METHOD_TYPE:
278 case FUNCTION_TYPE:
279 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
280 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
281 return new;
282
283 case ARRAY_TYPE:
284 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
285 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
286 break;
287
288 case RECORD_TYPE:
289 case UNION_TYPE:
290 case QUAL_UNION_TYPE:
291 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
292 break;
293
294 case FILE_TYPE:
295 case SET_TYPE:
296 case OFFSET_TYPE:
297 default:
298 /* Shouldn't have been thought variable sized. */
299 abort ();
300 }
301
302 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
303 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
304
305 return new;
306 }
307
308 #ifndef INLINER_FOR_JAVA
309 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
310 remapped versions of the variables therein. And hook the new block
311 into the block-tree. If non-NULL, the DECLS are declarations to
312 use instead of the BLOCK_VARS in the old block. */
313 #else /* INLINER_FOR_JAVA */
314 /* Copy the BLOCK to contain remapped versions of the variables
315 therein. And hook the new block into the block-tree. */
316 #endif /* INLINER_FOR_JAVA */
317
318 static void
319 #ifndef INLINER_FOR_JAVA
320 remap_block (tree scope_stmt, tree decls, inline_data *id)
321 #else /* INLINER_FOR_JAVA */
322 remap_block (tree *block, tree decls, inline_data *id)
323 #endif /* INLINER_FOR_JAVA */
324 {
325 #ifndef INLINER_FOR_JAVA
326 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
327 not know whether or not expand_expr will actually write out the
328 code we put there. If it does not, then we'll have more BLOCKs
329 than block-notes, and things will go awry. At some point, we
330 should make the back-end handle BLOCK notes in a tidier way,
331 without requiring a strict correspondence to the block-tree; then
332 this check can go. */
333 if (id->in_target_cleanup_p)
334 {
335 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
336 return;
337 }
338
339 /* If this is the beginning of a scope, remap the associated BLOCK. */
340 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
341 {
342 tree old_block;
343 tree new_block;
344 tree old_var;
345 tree fn;
346
347 /* Make the new block. */
348 old_block = SCOPE_STMT_BLOCK (scope_stmt);
349 new_block = make_node (BLOCK);
350 TREE_USED (new_block) = TREE_USED (old_block);
351 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
352 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
353
354 /* Remap its variables. */
355 for (old_var = decls ? decls : BLOCK_VARS (old_block);
356 old_var;
357 old_var = TREE_CHAIN (old_var))
358 {
359 tree new_var;
360
361 /* Remap the variable. */
362 new_var = remap_decl (old_var, id);
363 /* If we didn't remap this variable, then we can't mess with
364 its TREE_CHAIN. If we remapped this variable to
365 something other than a declaration (say, if we mapped it
366 to a constant), then we must similarly omit any mention
367 of it here. */
368 if (!new_var || !DECL_P (new_var))
369 ;
370 else
371 {
372 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
373 BLOCK_VARS (new_block) = new_var;
374 }
375 }
376 /* We put the BLOCK_VARS in reverse order; fix that now. */
377 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
378 fn = VARRAY_TREE (id->fns, 0);
379 if (id->cloning_p)
380 /* We're building a clone; DECL_INITIAL is still
381 error_mark_node, and current_binding_level is the parm
382 binding level. */
383 (*lang_hooks.decls.insert_block) (new_block);
384 else
385 {
386 /* Attach this new block after the DECL_INITIAL block for the
387 function into which this block is being inlined. In
388 rest_of_compilation we will straighten out the BLOCK tree. */
389 tree *first_block;
390 if (DECL_INITIAL (fn))
391 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
392 else
393 first_block = &DECL_INITIAL (fn);
394 BLOCK_CHAIN (new_block) = *first_block;
395 *first_block = new_block;
396 }
397 /* Remember the remapped block. */
398 splay_tree_insert (id->decl_map,
399 (splay_tree_key) old_block,
400 (splay_tree_value) new_block);
401 }
402 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
403 remapped block. */
404 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
405 {
406 splay_tree_node n;
407
408 /* Find this block in the table of remapped things. */
409 n = splay_tree_lookup (id->decl_map,
410 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
411 if (! n)
412 abort ();
413 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
414 }
415 #else /* INLINER_FOR_JAVA */
416 tree old_block;
417 tree new_block;
418 tree old_var;
419 tree fn;
420
421 /* Make the new block. */
422 old_block = *block;
423 new_block = make_node (BLOCK);
424 TREE_USED (new_block) = TREE_USED (old_block);
425 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
426 BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (old_block);
427 TREE_SIDE_EFFECTS (new_block) = TREE_SIDE_EFFECTS (old_block);
428 TREE_TYPE (new_block) = TREE_TYPE (old_block);
429 *block = new_block;
430
431 /* Remap its variables. */
432 for (old_var = decls ? decls : BLOCK_VARS (old_block);
433 old_var;
434 old_var = TREE_CHAIN (old_var))
435 {
436 tree new_var;
437
438 /* All local class initialization flags go in the outermost
439 scope. */
440 if (LOCAL_CLASS_INITIALIZATION_FLAG_P (old_var))
441 {
442 /* We may already have one. */
443 if (! splay_tree_lookup (id->decl_map, (splay_tree_key) old_var))
444 {
445 tree outermost_block;
446 new_var = remap_decl (old_var, id);
447 DECL_ABSTRACT_ORIGIN (new_var) = NULL;
448 outermost_block = DECL_SAVED_TREE (current_function_decl);
449 TREE_CHAIN (new_var) = BLOCK_VARS (outermost_block);
450 BLOCK_VARS (outermost_block) = new_var;
451 }
452 continue;
453 }
454
455 /* Remap the variable. */
456 new_var = remap_decl (old_var, id);
457 /* If we didn't remap this variable, then we can't mess with
458 its TREE_CHAIN. If we remapped this variable to
459 something other than a declaration (say, if we mapped it
460 to a constant), then we must similarly omit any mention
461 of it here. */
462 if (!new_var || !DECL_P (new_var))
463 ;
464 else
465 {
466 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
467 BLOCK_VARS (new_block) = new_var;
468 }
469 }
470 /* We put the BLOCK_VARS in reverse order; fix that now. */
471 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
472 fn = VARRAY_TREE (id->fns, 0);
473 /* Remember the remapped block. */
474 splay_tree_insert (id->decl_map,
475 (splay_tree_key) old_block,
476 (splay_tree_value) new_block);
477 #endif /* INLINER_FOR_JAVA */
478 }
479
480 #ifndef INLINER_FOR_JAVA
481 /* Copy the SCOPE_STMT pointed to by TP. */
482
483 static void
484 copy_scope_stmt (tree *tp, int *walk_subtrees, inline_data *id)
485 {
486 tree block;
487
488 /* Remember whether or not this statement was nullified. When
489 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
490 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
491 deal with copying BLOCKs if they do not wish to do so. */
492 block = SCOPE_STMT_BLOCK (*tp);
493 /* Copy (and replace) the statement. */
494 copy_tree_r (tp, walk_subtrees, NULL);
495 /* Restore the SCOPE_STMT_BLOCK. */
496 SCOPE_STMT_BLOCK (*tp) = block;
497
498 /* Remap the associated block. */
499 remap_block (*tp, NULL_TREE, id);
500 }
501 #endif /* not INLINER_FOR_JAVA */
502
503 /* Called from copy_body via walk_tree. DATA is really an
504 `inline_data *'. */
505 static tree
506 copy_body_r (tree *tp, int *walk_subtrees, void *data)
507 {
508 inline_data* id;
509 tree fn;
510
511 /* Set up. */
512 id = (inline_data *) data;
513 fn = VARRAY_TOP_TREE (id->fns);
514
515 #if 0
516 /* All automatic variables should have a DECL_CONTEXT indicating
517 what function they come from. */
518 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
519 && DECL_NAMESPACE_SCOPE_P (*tp))
520 if (! DECL_EXTERNAL (*tp) && ! TREE_STATIC (*tp))
521 abort ();
522 #endif
523
524 #ifdef INLINER_FOR_JAVA
525 if (TREE_CODE (*tp) == BLOCK)
526 remap_block (tp, NULL_TREE, id);
527 #endif
528
529 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
530 GOTO_STMT with the RET_LABEL as its target. */
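/* As an illustrative sketch only: a statement such as `return a + b;'
in the function being inlined comes through here as a return whose
expression already assigns to the RESULT_DECL, and it leaves as
roughly `<result-copy> = a + b;' chained with `goto <ret_label>;',
where <result-copy> and <ret_label> stand for the variable made by
declare_return_variable and for id->ret_label. The exact trees
differ between the C/C++ and Java paths below. */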
531 #ifndef INLINER_FOR_JAVA
532 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
533 #else /* INLINER_FOR_JAVA */
534 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
535 #endif /* INLINER_FOR_JAVA */
536 {
537 tree return_stmt = *tp;
538 tree goto_stmt;
539
540 /* Build the GOTO_STMT. */
541 #ifndef INLINER_FOR_JAVA
542 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
543 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
544 GOTO_FAKE_P (goto_stmt) = 1;
545 #else /* INLINER_FOR_JAVA */
546 tree assignment = TREE_OPERAND (return_stmt, 0);
547 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
548 TREE_SIDE_EFFECTS (goto_stmt) = 1;
549 #endif /* INLINER_FOR_JAVA */
550
551 /* If we're returning something, just turn that into an
552 assignment to the equivalent of the original
553 RESULT_DECL. */
554 #ifndef INLINER_FOR_JAVA
555 if (RETURN_STMT_EXPR (return_stmt))
556 {
557 *tp = build_stmt (EXPR_STMT,
558 RETURN_STMT_EXPR (return_stmt));
559 STMT_IS_FULL_EXPR_P (*tp) = 1;
560 /* And then jump to the end of the function. */
561 TREE_CHAIN (*tp) = goto_stmt;
562 }
563 #else /* INLINER_FOR_JAVA */
564 if (assignment)
565 {
566 copy_body_r (&assignment, walk_subtrees, data);
567 *tp = build (COMPOUND_EXPR, void_type_node, assignment, goto_stmt);
568 TREE_SIDE_EFFECTS (*tp) = 1;
569 }
570 #endif /* INLINER_FOR_JAVA */
571 /* If we're not returning anything just do the jump. */
572 else
573 *tp = goto_stmt;
574 }
575 /* Local variables and labels need to be replaced by equivalent
576 variables. We don't want to copy static variables; there's only
577 one of those, no matter how many times we inline the containing
578 function. */
579 else if ((*lang_hooks.tree_inlining.auto_var_in_fn_p) (*tp, fn))
580 {
581 tree new_decl;
582
583 /* Remap the declaration. */
584 new_decl = remap_decl (*tp, id);
585 if (! new_decl)
586 abort ();
587 /* Replace this variable with the copy. */
588 STRIP_TYPE_NOPS (new_decl);
589 *tp = new_decl;
590 }
591 #if 0
592 else if (nonstatic_local_decl_p (*tp)
593 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
594 abort ();
595 #endif
596 else if (TREE_CODE (*tp) == SAVE_EXPR)
597 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
598 walk_subtrees);
599 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
600 /* UNSAVE_EXPRs should not be generated until expansion time. */
601 abort ();
602 #ifndef INLINER_FOR_JAVA
603 /* For a SCOPE_STMT, we must copy the associated block so that we
604 can write out debugging information for the inlined variables. */
605 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
606 copy_scope_stmt (tp, walk_subtrees, id);
607 #else /* INLINER_FOR_JAVA */
608 else if (TREE_CODE (*tp) == LABELED_BLOCK_EXPR)
609 {
610 /* We need a new copy of this labeled block; the EXIT_BLOCK_EXPR
611 will refer to it, so save a copy ready for remapping. We
612 save it in the decl_map, although it isn't a decl. */
613 tree new_block = copy_node (*tp);
614 splay_tree_insert (id->decl_map,
615 (splay_tree_key) *tp,
616 (splay_tree_value) new_block);
617 *tp = new_block;
618 }
619 else if (TREE_CODE (*tp) == EXIT_BLOCK_EXPR)
620 {
621 splay_tree_node n
622 = splay_tree_lookup (id->decl_map,
623 (splay_tree_key) TREE_OPERAND (*tp, 0));
624 /* We _must_ have seen the enclosing LABELED_BLOCK_EXPR. */
625 if (! n)
626 abort ();
627 *tp = copy_node (*tp);
628 TREE_OPERAND (*tp, 0) = (tree) n->value;
629 }
630 #endif /* INLINER_FOR_JAVA */
631 /* Types may need remapping as well. */
632 else if (TYPE_P (*tp))
633 *tp = remap_type (*tp, id);
634
635 /* Otherwise, just copy the node. Note that copy_tree_r already
636 knows not to copy VAR_DECLs, etc., so this is safe. */
637 else
638 {
639 if (TREE_CODE (*tp) == MODIFY_EXPR
640 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
641 && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
642 (TREE_OPERAND (*tp, 0), fn)))
643 {
644 /* Some assignments VAR = VAR; don't generate any rtl code
645 and thus don't count as variable modification. Avoid
646 keeping bogosities like 0 = 0. */
647 tree decl = TREE_OPERAND (*tp, 0), value;
648 splay_tree_node n;
649
650 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
651 if (n)
652 {
653 value = (tree) n->value;
654 STRIP_TYPE_NOPS (value);
655 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
656 {
657 *tp = value;
658 return copy_body_r (tp, walk_subtrees, data);
659 }
660 }
661 }
662 else if (TREE_CODE (*tp) == ADDR_EXPR
663 && ((*lang_hooks.tree_inlining.auto_var_in_fn_p)
664 (TREE_OPERAND (*tp, 0), fn)))
665 {
666 /* Get rid of &* from inline substitutions. It can occur when
667 someone takes the address of a parm or return slot passed by
668 invisible reference. */
669 tree decl = TREE_OPERAND (*tp, 0), value;
670 splay_tree_node n;
671
672 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
673 if (n)
674 {
675 value = (tree) n->value;
676 if (TREE_CODE (value) == INDIRECT_REF)
677 {
678 *tp = convert (TREE_TYPE (*tp), TREE_OPERAND (value, 0));
679 return copy_body_r (tp, walk_subtrees, data);
680 }
681 }
682 }
683
684 copy_tree_r (tp, walk_subtrees, NULL);
685
686 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
687
688 /* The copied TARGET_EXPR has never been expanded, even if the
689 original node was expanded already. */
690 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
691 {
692 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
693 TREE_OPERAND (*tp, 3) = NULL_TREE;
694 }
695 }
696
697 /* Keep iterating. */
698 return NULL_TREE;
699 }
700
701 /* Make a copy of the body of FN so that it can be inserted inline in
702 another function. */
703
704 static tree
705 copy_body (inline_data *id)
706 {
707 tree body;
708
709 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
710 walk_tree (&body, copy_body_r, id, NULL);
711
712 return body;
713 }
714
715 /* Generate code to initialize the parameters of the function at the
716 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
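/* As a sketch (the names below are invented for illustration):
inlining a call `f (x, y + 1)' where `f' is declared
`int f (int a, int b)' produces, on the non-Java path, roughly

int a.1 = x;
int b.2 = y + 1;

and enters the mapping PARM_DECL -> new VAR_DECL in ID->DECL_MAP so
that the copied body refers to the copies; the `.1'/`.2' suffixes
only suggest the fresh names made by copy_decl_for_inlining. */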
717
718 static tree
719 #ifndef INLINER_FOR_JAVA
720 initialize_inlined_parameters (inline_data *id, tree args, tree fn)
721 #else /* INLINER_FOR_JAVA */
722 initialize_inlined_parameters (inline_data *id, tree args, tree fn, tree block)
723 #endif /* INLINER_FOR_JAVA */
724 {
725 tree init_stmts;
726 tree parms;
727 tree a;
728 tree p;
729 #ifdef INLINER_FOR_JAVA
730 tree vars = NULL_TREE;
731 #endif /* INLINER_FOR_JAVA */
732 int argnum = 0;
733
734 /* Figure out what the parameters are. */
735 parms =
736 DECL_ARGUMENTS (fn);
737
738 /* Start with no initializations whatsoever. */
739 init_stmts = NULL_TREE;
740
741 /* Loop through the parameter declarations, replacing each with an
742 equivalent VAR_DECL, appropriately initialized. */
743 for (p = parms, a = args; p;
744 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
745 {
746 #ifndef INLINER_FOR_JAVA
747 tree init_stmt;
748 tree cleanup;
749 #endif /* not INLINER_FOR_JAVA */
750 tree var;
751 tree value;
752 tree var_sub;
753
754 ++argnum;
755
756 /* Find the initializer. */
757 value = (*lang_hooks.tree_inlining.convert_parm_for_inlining)
758 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
759
760 /* If the parameter is never assigned to, we may not need to
761 create a new variable here at all. Instead, we may be able
762 to just use the argument value. */
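/* For example (purely illustrative): with `static inline int
twice (const int n) { return n + n; }', a call `twice (7)' maps
the PARM_DECL `n' directly to the constant 7 in ID->DECL_MAP, so
no VAR_DECL and no initialization statement are created for it. */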
763 if (TREE_READONLY (p)
764 && !TREE_ADDRESSABLE (p)
765 && value && !TREE_SIDE_EFFECTS (value))
766 {
767 /* Simplify the value, if possible. */
768 value = fold (DECL_P (value) ? decl_constant_value (value) : value);
769
770 /* We can't risk substituting complex expressions. They
771 might contain variables that will be assigned to later.
772 Theoretically, we could check the expression to see if
773 all of the variables that determine its value are
774 read-only, but we don't bother. */
775 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
776 {
777 /* If this is a declaration, wrap it in a NOP_EXPR so that
778 we don't try to put the VALUE on the list of
779 BLOCK_VARS. */
780 if (DECL_P (value))
781 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
782
783 /* If this is a constant, make sure it has the right type. */
784 else if (TREE_TYPE (value) != TREE_TYPE (p))
785 value = fold (build1 (NOP_EXPR, TREE_TYPE (p), value));
786
787 splay_tree_insert (id->decl_map,
788 (splay_tree_key) p,
789 (splay_tree_value) value);
790 continue;
791 }
792 }
793
794 /* Make an equivalent VAR_DECL. */
795 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
796
797 /* See if the frontend wants to pass this by invisible reference. If
798 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
799 replace uses of the PARM_DECL with dereferences. */
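/* For instance (sketch only): if `p' is a structure parameter that
the front end actually passes by address, VAR gets the pointer
(or reference) type, and VAR_SUB becomes the INDIRECT_REF `*VAR',
so uses of `p' in the copied body read through that pointer. */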
800 if (TREE_TYPE (var) != TREE_TYPE (p)
801 && POINTER_TYPE_P (TREE_TYPE (var))
802 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
803 var_sub = build1 (INDIRECT_REF, TREE_TYPE (p), var);
804 else
805 var_sub = var;
806
807 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
808 that way, when the PARM_DECL is encountered, it will be
809 automatically replaced by the VAR_DECL. */
810 splay_tree_insert (id->decl_map,
811 (splay_tree_key) p,
812 (splay_tree_value) var_sub);
813
814 /* Declare this new variable. */
815 #ifndef INLINER_FOR_JAVA
816 init_stmt = build_stmt (DECL_STMT, var);
817 TREE_CHAIN (init_stmt) = init_stmts;
818 init_stmts = init_stmt;
819 #else /* INLINER_FOR_JAVA */
820 TREE_CHAIN (var) = vars;
821 vars = var;
822 #endif /* INLINER_FOR_JAVA */
823
824 /* Initialize this VAR_DECL from the equivalent argument. If
825 the argument is an object, created via a constructor or copy,
826 this will not result in an extra copy: the TARGET_EXPR
827 representing the argument will be bound to VAR, and the
828 object will be constructed in VAR. */
829 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
830 #ifndef INLINER_FOR_JAVA
831 DECL_INITIAL (var) = value;
832 else
833 {
834 /* Even if P was TREE_READONLY, the new VAR should not be.
835 In the original code, we would have constructed a
836 temporary, and then the function body would have never
837 changed the value of P. However, now, we will be
838 constructing VAR directly. The constructor body may
839 change its value multiple times as it is being
840 constructed. Therefore, it must not be TREE_READONLY;
841 the back-end assumes that a TREE_READONLY variable is
842 assigned to only once. */
843 TREE_READONLY (var) = 0;
844
845 /* Build a run-time initialization. */
846 init_stmt = build_stmt (EXPR_STMT,
847 build (INIT_EXPR, TREE_TYPE (p),
848 var, value));
849 /* Add this initialization to the list. Note that we want the
850 declaration *after* the initialization because we are going
851 to reverse all the initialization statements below. */
852 TREE_CHAIN (init_stmt) = init_stmts;
853 init_stmts = init_stmt;
854 }
855
856 /* See if we need to clean up the declaration. */
857 cleanup = (*lang_hooks.maybe_build_cleanup) (var);
858 if (cleanup)
859 {
860 tree cleanup_stmt;
861 /* Build the cleanup statement. */
862 cleanup_stmt = build_stmt (CLEANUP_STMT, var, cleanup);
863 /* Add it to the *front* of the list; the list will be
864 reversed below. */
865 TREE_CHAIN (cleanup_stmt) = init_stmts;
866 init_stmts = cleanup_stmt;
867 }
868 #else /* INLINER_FOR_JAVA */
869 {
870 tree assignment = build (MODIFY_EXPR, TREE_TYPE (p), var, value);
871 init_stmts = add_stmt_to_compound (init_stmts, TREE_TYPE (p),
872 assignment);
873 }
874 else
875 {
876 /* Java objects don't ever need constructing when being
877 passed as arguments because only call by reference is
878 supported. */
879 abort ();
880 }
881 #endif /* INLINER_FOR_JAVA */
882 }
883
884 #ifndef INLINER_FOR_JAVA
885 /* Evaluate trailing arguments. */
886 for (; a; a = TREE_CHAIN (a))
887 {
888 tree init_stmt;
889 tree value = TREE_VALUE (a);
890
891 if (! value || ! TREE_SIDE_EFFECTS (value))
892 continue;
893
894 init_stmt = build_stmt (EXPR_STMT, value);
895 TREE_CHAIN (init_stmt) = init_stmts;
896 init_stmts = init_stmt;
897 }
898
899 /* The initialization statements have been built up in reverse
900 order. Straighten them out now. */
901 return nreverse (init_stmts);
902 #else /* INLINER_FOR_JAVA */
903 BLOCK_VARS (block) = nreverse (vars);
904 return init_stmts;
905 #endif /* INLINER_FOR_JAVA */
906 }
907
908 /* Declare a return variable to replace the RESULT_DECL for the
909 function we are calling. An appropriate DECL_STMT is returned.
910 The USE_STMT is filled in to contain a use of the declaration to
911 indicate the return value of the function. */
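/* As a rough illustration of the C/C++ path: for a function returning
`int', the RESULT_DECL is mapped to a fresh VAR_DECL (call it
<retvar>), the returned DECL_STMT declares it, and USE_STMT ends up
as an EXPR_STMT of <retvar> (wrapped in a NOP_EXPR if the return
type was promoted), so that placing it last makes it the value of
the enclosing statement-expression. <retvar> is just a placeholder
name here. */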
912
913 #ifndef INLINER_FOR_JAVA
914 static tree
915 declare_return_variable (struct inline_data *id, tree return_slot_addr,
916 tree *use_stmt)
917 #else /* INLINER_FOR_JAVA */
918 static tree
919 declare_return_variable (struct inline_data *id, tree return_slot_addr,
920 tree *var)
921 #endif /* INLINER_FOR_JAVA */
922 {
923 tree fn = VARRAY_TOP_TREE (id->fns);
924 tree result = DECL_RESULT (fn);
925 #ifndef INLINER_FOR_JAVA
926 tree var;
927 #endif /* not INLINER_FOR_JAVA */
928 int need_return_decl = 1;
929
930 /* We don't need to do anything for functions that don't return
931 anything. */
932 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
933 {
934 #ifndef INLINER_FOR_JAVA
935 *use_stmt = NULL_TREE;
936 #else /* INLINER_FOR_JAVA */
937 *var = NULL_TREE;
938 #endif /* INLINER_FOR_JAVA */
939 return NULL_TREE;
940 }
941
942 #ifndef INLINER_FOR_JAVA
943 var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
944 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
945 &need_return_decl, return_slot_addr));
946
947 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
948 way, when the RESULT_DECL is encountered, it will be
949 automatically replaced by the VAR_DECL. */
950 splay_tree_insert (id->decl_map,
951 (splay_tree_key) result,
952 (splay_tree_value) var);
953
954 /* Build the USE_STMT. If the return type of the function was
955 promoted, convert it back to the expected type. */
956 if (TREE_TYPE (var) == TREE_TYPE (TREE_TYPE (fn)))
957 *use_stmt = build_stmt (EXPR_STMT, var);
958 else
959 *use_stmt = build_stmt (EXPR_STMT,
960 build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)),
961 var));
962 TREE_ADDRESSABLE (*use_stmt) = 1;
963
964 /* Build the declaration statement if FN does not return an
965 aggregate. */
966 if (need_return_decl)
967 return build_stmt (DECL_STMT, var);
968 #else /* INLINER_FOR_JAVA */
969 *var = ((*lang_hooks.tree_inlining.copy_res_decl_for_inlining)
970 (result, fn, VARRAY_TREE (id->fns, 0), id->decl_map,
971 &need_return_decl, return_slot_addr));
972
973 splay_tree_insert (id->decl_map,
974 (splay_tree_key) result,
975 (splay_tree_value) *var);
976 DECL_IGNORED_P (*var) = 1;
977 if (need_return_decl)
978 return *var;
979 #endif /* INLINER_FOR_JAVA */
980 /* If FN does return an aggregate, there's no need to declare the
981 return variable; we're using a variable in our caller's frame. */
982 else
983 return NULL_TREE;
984 }
985
986 /* Returns nonzero if a function can be inlined as a tree. */
987
988 bool
989 tree_inlinable_function_p (tree fn)
990 {
991 return inlinable_function_p (fn);
992 }
993
994 static const char *inline_forbidden_reason;
995
996 static tree
997 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
998 void *fnp)
999 {
1000 tree node = *nodep;
1001 tree fn = (tree) fnp;
1002 tree t;
1003
1004 switch (TREE_CODE (node))
1005 {
1006 case CALL_EXPR:
1007 /* Refuse to inline an alloca call unless the user explicitly forced it,
1008 as this may change the program's memory overhead drastically when the
1009 function using alloca is called in a loop. In the GCC sources in
1010 SPEC2000, inlining into schedule_block caused it to require 2GB of
1011 RAM instead of 256MB. */
1012 if (alloca_call_p (node)
1013 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1014 {
1015 inline_forbidden_reason
1016 = N_("%Jfunction '%F' can never be inlined because it uses "
1017 "alloca (override using the always_inline attribute)");
1018 return node;
1019 }
1020 t = get_callee_fndecl (node);
1021 if (! t)
1022 break;
1023
1024
1025 /* We cannot inline functions that call setjmp. */
1026 if (setjmp_call_p (t))
1027 {
1028 inline_forbidden_reason
1029 = N_("%Jfunction '%F' can never be inlined because it uses setjmp");
1030 return node;
1031 }
1032
1033 if (DECL_BUILT_IN (t))
1034 switch (DECL_FUNCTION_CODE (t))
1035 {
1036 /* We cannot inline functions that take a variable number of
1037 arguments. */
1038 case BUILT_IN_VA_START:
1039 case BUILT_IN_STDARG_START:
1040 case BUILT_IN_NEXT_ARG:
1041 case BUILT_IN_VA_END:
1042 {
1043 inline_forbidden_reason
1044 = N_("%Jfunction '%F' can never be inlined because it "
1045 "uses variable argument lists");
1046 return node;
1047 }
1048 case BUILT_IN_LONGJMP:
1049 {
1050 /* We can't inline functions that call __builtin_longjmp at
1051 all. The non-local goto machinery really requires the
1052 destination be in a different function. If we allow the
1053 function calling __builtin_longjmp to be inlined into the
1054 function calling __builtin_setjmp, Things will Go Awry. */
1055 /* ??? Need front end help to identify "regular" non-local
1056 goto. */
1057 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1058 {
1059 inline_forbidden_reason
1060 = N_("%Jfunction '%F' can never be inlined because "
1061 "it uses setjmp-longjmp exception handling");
1062 return node;
1063 }
1064 }
1065
1066 default:
1067 break;
1068 }
1069 break;
1070
1071 #ifndef INLINER_FOR_JAVA
1072 case DECL_STMT:
1073 /* We cannot inline functions that contain other functions. */
1074 if (TREE_CODE (TREE_OPERAND (node, 0)) == FUNCTION_DECL
1075 && DECL_INITIAL (TREE_OPERAND (node, 0)))
1076 {
1077 inline_forbidden_reason
1078 = N_("%Jfunction '%F' can never be inlined "
1079 "because it contains a nested function");
1080 return node;
1081 }
1082 break;
1083
1084 case GOTO_STMT:
1085 case GOTO_EXPR:
1086 t = TREE_OPERAND (node, 0);
1087
1088 /* We will not inline a function which uses computed goto. The
1089 addresses of its local labels, which may be tucked into
1090 global storage, are of course not constant across
1091 instantiations, which causes unexpected behavior. */
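/* An illustrative example of what is rejected here, using the GNU
labels-as-values extension:

static void *resume;
...
resume = &&restart;
...
goto *resume;

Each inlined copy of the function would get its own `restart'
label, while `resume' may still hold the address taken in a
different instantiation. */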
1092 if (TREE_CODE (t) != LABEL_DECL)
1093 {
1094 inline_forbidden_reason
1095 = N_("%Jfunction '%F' can never be inlined "
1096 "because it contains a computed goto");
1097 return node;
1098 }
1099
1100 /* We cannot inline a nested function that jumps to a nonlocal
1101 label. */
1102 if (TREE_CODE (t) == LABEL_DECL && DECL_CONTEXT (t) != fn)
1103 {
1104 inline_forbidden_reason
1105 = N_("%Jfunction '%F' can never be inlined "
1106 "because it contains a nonlocal goto");
1107 return node;
1108 }
1109
1110 break;
1111
1112 case RECORD_TYPE:
1113 case UNION_TYPE:
1114 /* We cannot inline a function of the form
1115
1116 void F (int i) { struct S { int ar[i]; } s; }
1117
1118 Attempting to do so produces a catch-22.
1119 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1120 UNION_TYPE nodes, then it goes into infinite recursion on a
1121 structure containing a pointer to its own type. If it doesn't,
1122 then the type node for S doesn't get adjusted properly when
1123 F is inlined, and we abort in find_function_data. */
1124 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1125 if (variably_modified_type_p (TREE_TYPE (t)))
1126 {
1127 inline_forbidden_reason
1128 = N_("%Jfunction '%F' can never be inlined "
1129 "because it uses variable sized variables");
1130 return node;
1131 }
1132 #endif
1133 default:
1134 break;
1135 }
1136
1137 return NULL_TREE;
1138 }
1139
1140 /* Return a subexpression that forbids inlining of FNDECL, if any. */
1141 static tree
1142 inline_forbidden_p (tree fndecl)
1143 {
1144 location_t saved_loc = input_location;
1145 tree ret = walk_tree_without_duplicates
1146 (&DECL_SAVED_TREE (fndecl), inline_forbidden_p_1, fndecl);
1147 input_location = saved_loc;
1148 return ret;
1149 }
1150
1151 /* Returns nonzero if FN is a function that does not have any
1152 fundamental inline blocking properties. */
1153
1154 static bool
1155 inlinable_function_p (tree fn)
1156 {
1157 bool inlinable = true;
1158
1159 /* If we've already decided this function shouldn't be inlined,
1160 there's no need to check again. */
1161 if (DECL_UNINLINABLE (fn))
1162 return false;
1163
1164 /* See if there is any language-specific reason it cannot be
1165 inlined. (It is important that this hook be called early because
1166 in C++ it may result in template instantiation.)
1167 If the function is not inlinable for language-specific reasons,
1168 it is left up to the langhook to explain why. */
1169 inlinable = !(*lang_hooks.tree_inlining.cannot_inline_tree_fn) (&fn);
1170
1171 /* If we don't have the function body available, we can't inline it.
1172 However, this should not be recorded since we also get here for
1173 forward declared inline functions. Therefore, return at once. */
1174 if (!DECL_SAVED_TREE (fn))
1175 return false;
1176
1177 /* If we're not inlining at all, then we cannot inline this function. */
1178 else if (!flag_inline_trees)
1179 inlinable = false;
1180
1181 /* Only try to inline functions if DECL_INLINE is set. This should be
1182 true for all functions declared `inline', and for all other functions
1183 as well with -finline-functions.
1184
1185 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1186 it's the front-end that must set DECL_INLINE in this case, because
1187 dwarf2out loses if a function that does not have DECL_INLINE set is
1188 inlined anyway. That is why we have both DECL_INLINE and
1189 DECL_DECLARED_INLINE_P. */
1190 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1191 here should be redundant. */
1192 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1193 inlinable = false;
1194
1195 #ifdef INLINER_FOR_JAVA
1196 /* Synchronized methods can't be inlined. This is a bug. */
1197 else if (METHOD_SYNCHRONIZED (fn))
1198 inlinable = false;
1199 #endif /* INLINER_FOR_JAVA */
1200
1201 else if (inline_forbidden_p (fn))
1202 {
1203 /* See if we should warn about uninlinable functions. Previously,
1204 some of these warnings would be issued while trying to expand
1205 the function inline, but that would cause multiple warnings
1206 about functions that would for example call alloca. But since
1207 this is a property of the function, just one warning is enough.
1208 As a bonus we can now give more details about the reason why a
1209 function is not inlinable.
1210 We only warn for functions declared `inline' by the user. */
1211 bool do_warning = (warn_inline
1212 && DECL_INLINE (fn)
1213 && DECL_DECLARED_INLINE_P (fn)
1214 && !DECL_IN_SYSTEM_HEADER (fn));
1215
1216 if (lookup_attribute ("always_inline",
1217 DECL_ATTRIBUTES (fn)))
1218 sorry (inline_forbidden_reason, fn, fn);
1219 else if (do_warning)
1220 warning (inline_forbidden_reason, fn, fn);
1221
1222 inlinable = false;
1223 }
1224
1225 /* Squirrel away the result so that we don't have to check again. */
1226 DECL_UNINLINABLE (fn) = !inlinable;
1227
1228 return inlinable;
1229 }
1230
1231 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1232
1233 static tree
1234 expand_call_inline (tree *tp, int *walk_subtrees, void *data)
1235 {
1236 inline_data *id;
1237 tree t;
1238 tree expr;
1239 tree stmt;
1240 #ifndef INLINER_FOR_JAVA
1241 tree chain;
1242 tree scope_stmt;
1243 tree use_stmt;
1244 #else /* INLINER_FOR_JAVA */
1245 tree retvar;
1246 #endif /* INLINER_FOR_JAVA */
1247 tree fn;
1248 tree arg_inits;
1249 tree *inlined_body;
1250 splay_tree st;
1251 tree args;
1252 tree return_slot_addr;
1253 const char *reason;
1254
1255 /* See what we've got. */
1256 id = (inline_data *) data;
1257 t = *tp;
1258
1259 /* Recurse, but letting recursive invocations know that we are
1260 inside the body of a TARGET_EXPR. */
1261 if (TREE_CODE (*tp) == TARGET_EXPR)
1262 {
1263 #ifndef INLINER_FOR_JAVA
1264 int i, len = first_rtl_op (TARGET_EXPR);
1265
1266 /* We're walking our own subtrees. */
1267 *walk_subtrees = 0;
1268
1269 /* Actually walk over them. This loop is the body of
1270 walk_tree, omitting the case where the TARGET_EXPR
1271 itself is handled. */
1272 for (i = 0; i < len; ++i)
1273 {
1274 if (i == 2)
1275 ++id->in_target_cleanup_p;
1276 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
1277 id->tree_pruner);
1278 if (i == 2)
1279 --id->in_target_cleanup_p;
1280 }
1281
1282 return NULL_TREE;
1283 #else /* INLINER_FOR_JAVA */
1284 abort ();
1285 #endif /* INLINER_FOR_JAVA */
1286 }
1287 else if (TREE_CODE (t) == EXPR_WITH_FILE_LOCATION)
1288 {
1289 /* We're walking the subtree directly. */
1290 *walk_subtrees = 0;
1291 /* Update the source position. */
1292 push_srcloc (EXPR_WFL_FILENAME (t), EXPR_WFL_LINENO (t));
1293 walk_tree (&EXPR_WFL_NODE (t), expand_call_inline, data,
1294 id->tree_pruner);
1295 /* Restore the original source position. */
1296 pop_srcloc ();
1297
1298 return NULL_TREE;
1299 }
1300
1301 if (TYPE_P (t))
1302 /* Because types were not copied in copy_body, CALL_EXPRs beneath
1303 them should not be expanded. This can happen if the type is a
1304 dynamic array type, for example. */
1305 *walk_subtrees = 0;
1306
1307 /* From here on, we're only interested in CALL_EXPRs. */
1308 if (TREE_CODE (t) != CALL_EXPR)
1309 return NULL_TREE;
1310
1311 /* First, see if we can figure out what function is being called.
1312 If we cannot, then there is no hope of inlining the function. */
1313 fn = get_callee_fndecl (t);
1314 if (!fn)
1315 return NULL_TREE;
1316
1317 /* Turn forward declarations into real ones. */
1318 fn = cgraph_node (fn)->decl;
1319
1320 /* If fn is a declaration of a function in a nested scope that was
1321 globally declared inline, we don't set its DECL_INITIAL.
1322 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1323 C++ front-end uses it for cdtors to refer to their internal
1324 declarations, which are not real functions. Fortunately those
1325 don't have trees to be saved, so we can tell by checking their
1326 DECL_SAVED_TREE. */
1327 if (! DECL_INITIAL (fn)
1328 && DECL_ABSTRACT_ORIGIN (fn)
1329 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1330 fn = DECL_ABSTRACT_ORIGIN (fn);
1331
1332 /* Don't try to inline functions that are not well-suited to
1333 inlining. */
1334 if (!cgraph_inline_p (id->current_decl, fn, &reason))
1335 {
1336 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1337 {
1338 sorry ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1339 sorry ("called from here");
1340 }
1341 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1342 && !DECL_IN_SYSTEM_HEADER (fn)
1343 && strlen (reason))
1344 {
1345 warning ("%Jinlining failed in call to '%F': %s", fn, fn, reason);
1346 warning ("called from here");
1347 }
1348 return NULL_TREE;
1349 }
1350
1351 if (! (*lang_hooks.tree_inlining.start_inlining) (fn))
1352 return NULL_TREE;
1353
1354 /* Set the current filename and line number to the function we are
1355 inlining so that when we create new _STMT nodes here they get
1356 line numbers corresponding to the function we are calling. We
1357 wrap the whole inlined body in an EXPR_WITH_FILE_LOCATION as well
1358 because individual statements don't record the filename. */
1359 push_srcloc (DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn));
1360
1361 #ifndef INLINER_FOR_JAVA
1362 /* Build a statement-expression containing code to initialize the
1363 arguments, the actual inline expansion of the body, and a label
1364 for the return statements within the function to jump to. The
1365 type of the statement expression is the return type of the
1366 function call. */
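/* Purely as a sketch of the shape being built (not literal output):
for a call `z = f (a)' with `f' defined as
`int f (int n) { return n * 2; }', the pieces chained onto STMT
below amount to something like

z = ({ int n.1 = a;
<retvar> = n.1 * 2; goto <ret_label>;
<ret_label>:;
<retvar>; });

where `n.1', <retvar> and <ret_label> are stand-ins for the copies
and the label created further down. */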
1367 expr = build1 (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), make_node (COMPOUND_STMT));
1368 /* There is no scope associated with the statement-expression. */
1369 STMT_EXPR_NO_SCOPE (expr) = 1;
1370 if (lookup_attribute ("warn_unused_result",
1371 TYPE_ATTRIBUTES (TREE_TYPE (fn))))
1372 STMT_EXPR_WARN_UNUSED_RESULT (expr) = 1;
1373 stmt = STMT_EXPR_STMT (expr);
1374 #else /* INLINER_FOR_JAVA */
1375 /* Build a block containing code to initialize the arguments, the
1376 actual inline expansion of the body, and a label for the return
1377 statements within the function to jump to. The type of the
1378 statement expression is the return type of the function call. */
1379 stmt = NULL;
1380 expr = build (BLOCK, TREE_TYPE (TREE_TYPE (fn)), stmt);
1381 #endif /* INLINER_FOR_JAVA */
1382
1383 /* Local declarations will be replaced by their equivalents in this
1384 map. */
1385 st = id->decl_map;
1386 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
1387 NULL, NULL);
1388
1389 /* Initialize the parameters. */
1390 args = TREE_OPERAND (t, 1);
1391 return_slot_addr = NULL_TREE;
1392 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
1393 {
1394 return_slot_addr = TREE_VALUE (args);
1395 args = TREE_CHAIN (args);
1396 }
1397
1398 #ifndef INLINER_FOR_JAVA
1399 arg_inits = initialize_inlined_parameters (id, args, fn);
1400 /* Expand any inlined calls in the initializers. Do this before we
1401 push FN on the stack of functions we are inlining; we want to
1402 inline calls to FN that appear in the initializers for the
1403 parameters. */
1404 expand_calls_inline (&arg_inits, id);
1405 /* And add them to the tree. */
1406 COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), arg_inits);
1407 #else /* INLINER_FOR_JAVA */
1408 arg_inits = initialize_inlined_parameters (id, args, fn, expr);
1409 if (arg_inits)
1410 {
1411 /* Expand any inlined calls in the initializers. Do this before we
1412 push FN on the stack of functions we are inlining; we want to
1413 inline calls to FN that appear in the initializers for the
1414 parameters. */
1415 expand_calls_inline (&arg_inits, id);
1416
1417 /* And add them to the tree. */
1418 BLOCK_EXPR_BODY (expr) = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1419 TREE_TYPE (arg_inits),
1420 arg_inits);
1421 }
1422 #endif /* INLINER_FOR_JAVA */
1423
1424 /* Record the function we are about to inline so that we can avoid
1425 recursing into it. */
1426 VARRAY_PUSH_TREE (id->fns, fn);
1427
1428 /* Record the function we are about to inline if optimize_function
1429 has not been called on it yet and we don't have it in the list. */
1430 if (! DECL_INLINED_FNS (fn))
1431 {
1432 int i;
1433
1434 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
1435 if (VARRAY_TREE (id->inlined_fns, i) == fn)
1436 break;
1437 if (i < 0)
1438 VARRAY_PUSH_TREE (id->inlined_fns, fn);
1439 }
1440
1441 /* Return statements in the function body will be replaced by jumps
1442 to the RET_LABEL. */
1443 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
1444 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
1445
1446 if (! DECL_INITIAL (fn)
1447 || TREE_CODE (DECL_INITIAL (fn)) != BLOCK)
1448 abort ();
1449
1450 #ifndef INLINER_FOR_JAVA
1451 /* Create a block to put the parameters in. We have to do this
1452 after the parameters have been remapped because remapping
1453 parameters is different from remapping ordinary variables. */
1454 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
1455 SCOPE_BEGIN_P (scope_stmt) = 1;
1456 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
1457 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
1458 TREE_CHAIN (scope_stmt) = COMPOUND_BODY (stmt);
1459 COMPOUND_BODY (stmt) = scope_stmt;
1460
1461 /* Tell the debugging backends that this block represents the
1462 outermost scope of the inlined function. */
1463 if (SCOPE_STMT_BLOCK (scope_stmt))
1464 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
1465
1466 /* Declare the return variable for the function. */
1467 COMPOUND_BODY (stmt)
1468 = chainon (COMPOUND_BODY (stmt),
1469 declare_return_variable (id, return_slot_addr, &use_stmt));
1470 #else /* INLINER_FOR_JAVA */
1471 {
1472 /* Declare the return variable for the function. */
1473 tree decl = declare_return_variable (id, return_slot_addr, &retvar);
1474 if (retvar)
1475 {
1476 tree *next = &BLOCK_VARS (expr);
1477 while (*next)
1478 next = &TREE_CHAIN (*next);
1479 *next = decl;
1480 }
1481 }
1482 #endif /* INLINER_FOR_JAVA */
1483
1484 /* After we've initialized the parameters, we insert the body of the
1485 function itself. */
1486 #ifndef INLINER_FOR_JAVA
1487 inlined_body = &COMPOUND_BODY (stmt);
1488 while (*inlined_body)
1489 inlined_body = &TREE_CHAIN (*inlined_body);
1490 *inlined_body = copy_body (id);
1491 #else /* INLINER_FOR_JAVA */
1492 {
1493 tree new_body;
1494 java_inlining_map_static_initializers (fn, id->decl_map);
1495 new_body = copy_body (id);
1496 TREE_TYPE (new_body) = TREE_TYPE (TREE_TYPE (fn));
1497 BLOCK_EXPR_BODY (expr)
1498 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1499 TREE_TYPE (new_body), new_body);
1500 inlined_body = &BLOCK_EXPR_BODY (expr);
1501 }
1502 #endif /* INLINER_FOR_JAVA */
1503
1504 /* After the body of the function comes the RET_LABEL. This must come
1505 before we evaluate the returned value below, because that evaluation
1506 may cause RTL to be generated. */
1507 #ifndef INLINER_FOR_JAVA
1508 COMPOUND_BODY (stmt)
1509 = chainon (COMPOUND_BODY (stmt),
1510 build_stmt (LABEL_STMT, id->ret_label));
1511 #else /* INLINER_FOR_JAVA */
1512 {
1513 tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
1514 BLOCK_EXPR_BODY (expr)
1515 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr), void_type_node, label);
1516 TREE_SIDE_EFFECTS (label) = TREE_SIDE_EFFECTS (t);
1517 }
1518 #endif /* INLINER_FOR_JAVA */
1519
1520 /* Finally, mention the returned value so that the value of the
1521 statement-expression is the returned value of the function. */
1522 #ifndef INLINER_FOR_JAVA
1523 COMPOUND_BODY (stmt) = chainon (COMPOUND_BODY (stmt), use_stmt);
1524
1525 /* Close the block for the parameters. */
1526 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
1527 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
1528 remap_block (scope_stmt, NULL_TREE, id);
1529 COMPOUND_BODY (stmt)
1530 = chainon (COMPOUND_BODY (stmt), scope_stmt);
1531 #else /* INLINER_FOR_JAVA */
1532 if (retvar)
1533 {
1534 /* Mention the retvar. If the return type of the function was
1535 promoted, convert it back to the expected type. */
1536 if (TREE_TYPE (TREE_TYPE (fn)) != TREE_TYPE (retvar))
1537 retvar = build1 (NOP_EXPR, TREE_TYPE (TREE_TYPE (fn)), retvar);
1538 BLOCK_EXPR_BODY (expr)
1539 = add_stmt_to_compound (BLOCK_EXPR_BODY (expr),
1540 TREE_TYPE (retvar), retvar);
1541 }
1542
1543 java_inlining_merge_static_initializers (fn, id->decl_map);
1544 #endif /* INLINER_FOR_JAVA */
1545
1546 /* Clean up. */
1547 splay_tree_delete (id->decl_map);
1548 id->decl_map = st;
1549
1550 /* The new expression has side-effects if the old one did. */
1551 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
1552
1553 /* Replace the call by the inlined body. Wrap it in an
1554 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
1555 pointing to the right place. */
1556 #ifndef INLINER_FOR_JAVA
1557 chain = TREE_CHAIN (*tp);
1558 #endif /* INLINER_FOR_JAVA */
1559 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
1560 /*col=*/0);
1561 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
1562 #ifndef INLINER_FOR_JAVA
1563 TREE_CHAIN (*tp) = chain;
1564 #endif /* not INLINER_FOR_JAVA */
1565 pop_srcloc ();
1566
1567 /* If the value of the new expression is ignored, that's OK. We
1568 don't warn about this for CALL_EXPRs, so we shouldn't warn about
1569 the equivalent inlined version either. */
1570 TREE_USED (*tp) = 1;
1571
1572 /* Our function now has more statements than it did before. */
1573 DECL_ESTIMATED_INSNS (VARRAY_TREE (id->fns, 0)) += DECL_ESTIMATED_INSNS (fn);
1574 /* For accounting, subtract one for the saved call/ret. */
1575 id->inlined_insns += DECL_ESTIMATED_INSNS (fn) - 1;
1576
1577 /* Update callgraph if needed. */
1578 if (id->decl)
1579 {
1580 cgraph_remove_call (id->decl, fn);
1581 cgraph_create_edges (id->decl, *inlined_body);
1582 }
1583
1584 /* Recurse into the body of the just inlined function. */
1585 {
1586 tree old_decl = id->current_decl;
1587 id->current_decl = fn;
1588 expand_calls_inline (inlined_body, id);
1589 id->current_decl = old_decl;
1590 }
1591 VARRAY_POP (id->fns);
1592
1593 /* If we've returned to the top level, clear out the record of how
1594 much inlining has been done. */
1595 if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
1596 id->inlined_insns = 0;
1597
1598 /* Don't walk into subtrees. We've already handled them above. */
1599 *walk_subtrees = 0;
1600
1601 (*lang_hooks.tree_inlining.end_inlining) (fn);
1602
1603 /* Keep iterating. */
1604 return NULL_TREE;
1605 }
1606 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
1607 expansions as appropriate. */
1608
1609 static void
1610 expand_calls_inline (tree *tp, inline_data *id)
1611 {
1612 /* Search through *TP, replacing all calls to inline functions by
1613 appropriate equivalents. Use walk_tree in no-duplicates mode
1614 to avoid exponential time complexity. (We can't just use
1615 walk_tree_without_duplicates, because of the special TARGET_EXPR
1616 handling in expand_calls.) The hash table is set up in
1617 optimize_function. */
1618 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
1619 }
1620
1621 /* Expand calls to inline functions in the body of FN. */
1622
1623 void
1624 optimize_inline_calls (tree fn)
1625 {
1626 inline_data id;
1627 tree prev_fn;
1628
1629 /* Clear out ID. */
1630 memset (&id, 0, sizeof (id));
1631
1632 id.decl = fn;
1633 id.current_decl = fn;
1634 /* Don't allow recursion into FN. */
1635 VARRAY_TREE_INIT (id.fns, 32, "fns");
1636 VARRAY_PUSH_TREE (id.fns, fn);
1637 if (!DECL_ESTIMATED_INSNS (fn))
1638 DECL_ESTIMATED_INSNS (fn)
1639 = (*lang_hooks.tree_inlining.estimate_num_insns) (fn);
1640 /* Or any functions that aren't finished yet. */
1641 prev_fn = NULL_TREE;
1642 if (current_function_decl)
1643 {
1644 VARRAY_PUSH_TREE (id.fns, current_function_decl);
1645 prev_fn = current_function_decl;
1646 }
1647
1648 prev_fn = ((*lang_hooks.tree_inlining.add_pending_fn_decls)
1649 (&id.fns, prev_fn));
1650
1651 /* Create the list of functions this call will inline. */
1652 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
1653
1654 /* Keep track of the low-water mark, i.e., the point where the first
1655 real inlining is represented in ID.FNS. */
1656 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
1657
1658 /* Replace all calls to inline functions with the bodies of those
1659 functions. */
1660 id.tree_pruner = htab_create (37, htab_hash_pointer,
1661 htab_eq_pointer, NULL);
1662 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
1663
1664 /* Clean up. */
1665 htab_delete (id.tree_pruner);
1666 if (DECL_LANG_SPECIFIC (fn))
1667 {
1668 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1669
1670 if (VARRAY_ACTIVE_SIZE (id.inlined_fns))
1671 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1672 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1673 DECL_INLINED_FNS (fn) = ifn;
1674 }
1675 }
1676
1677 /* FN is a function that has a complete body, and CLONE is a function
1678 whose body is to be set to a copy of FN, mapping argument
1679 declarations according to the ARG_MAP splay_tree. */
1680
1681 void
1682 clone_body (tree clone, tree fn, void *arg_map)
1683 {
1684 inline_data id;
1685
1686 /* Clone the body, as if we were making an inline call. But, remap
1687 the parameters in the callee to the parameters of the caller. If
1688 there's an in-charge parameter, map it to an appropriate
1689 constant. */
1690 memset (&id, 0, sizeof (id));
1691 VARRAY_TREE_INIT (id.fns, 2, "fns");
1692 VARRAY_PUSH_TREE (id.fns, clone);
1693 VARRAY_PUSH_TREE (id.fns, fn);
1694 id.decl_map = (splay_tree)arg_map;
1695
1696 /* Cloning is treated slightly differently from inlining. Set
1697 CLONING_P so that it's clear which operation we're performing. */
1698 id.cloning_p = true;
1699
1700 /* Actually copy the body. */
1701 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1702 }
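
/* Illustrative sketch (editorial addition): building the ARG_MAP that
   clone_body expects, pairing each PARM_DECL of FN with the
   corresponding PARM_DECL of CLONE.  A real caller (for instance the
   C++ front end's clone machinery) may also map an in-charge parameter
   to a constant, as the comment above notes.  The example_* name is
   hypothetical, and CLONE is assumed to already have a DECL_SAVED_TREE
   for the copied body to be chained onto.  */

static void
example_clone_body (tree clone, tree fn)
{
  splay_tree arg_map
    = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  tree from, to;

  /* Map each parameter of FN to the matching parameter of CLONE.  */
  for (from = DECL_ARGUMENTS (fn), to = DECL_ARGUMENTS (clone);
       from && to;
       from = TREE_CHAIN (from), to = TREE_CHAIN (to))
    splay_tree_insert (arg_map,
                       (splay_tree_key) from,
                       (splay_tree_value) to);

  clone_body (clone, fn, arg_map);
  splay_tree_delete (arg_map);
}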
1703
1704 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1705 FUNC is called with the DATA and the address of each sub-tree. If
1706 FUNC returns a non-NULL value, the traversal is aborted, and the
1707 value returned by FUNC is returned. If HTAB is non-NULL it is used
1708 to record the nodes visited, and to avoid visiting a node more than
1709 once. */
1710
1711 tree
1712 walk_tree (tree *tp, walk_tree_fn func, void *data, void *htab_)
1713 {
1714 htab_t htab = (htab_t) htab_;
1715 enum tree_code code;
1716 int walk_subtrees;
1717 tree result;
1718
1719 #define WALK_SUBTREE(NODE) \
1720 do \
1721 { \
1722 result = walk_tree (&(NODE), func, data, htab); \
1723 if (result) \
1724 return result; \
1725 } \
1726 while (0)
1727
1728 #define WALK_SUBTREE_TAIL(NODE) \
1729 do \
1730 { \
1731 tp = & (NODE); \
1732 goto tail_recurse; \
1733 } \
1734 while (0)
1735
1736 tail_recurse:
1737 /* Skip empty subtrees. */
1738 if (!*tp)
1739 return NULL_TREE;
1740
1741 if (htab)
1742 {
1743 void **slot;
1744
1745 /* Don't walk the same tree twice, if the user has requested
1746 that we avoid doing so. */
1747 slot = htab_find_slot (htab, *tp, INSERT);
1748 if (*slot)
1749 return NULL_TREE;
1750 *slot = *tp;
1751 }
1752
1753 /* Call the function. */
1754 walk_subtrees = 1;
1755 result = (*func) (tp, &walk_subtrees, data);
1756
1757 /* If we found something, return it. */
1758 if (result)
1759 return result;
1760
1761 code = TREE_CODE (*tp);
1762
1763 #ifndef INLINER_FOR_JAVA
1764 /* Even if we didn't, FUNC may have decided that there was nothing
1765 interesting below this point in the tree. */
1766 if (!walk_subtrees)
1767 {
1768 if (STATEMENT_CODE_P (code) || code == TREE_LIST
1769 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1770 /* But we still need to check our siblings. */
1771 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1772 else
1773 return NULL_TREE;
1774 }
1775
1776 /* Handle common cases up front. */
1777 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
1778 #else /* INLINER_FOR_JAVA */
1779 if (code != EXIT_BLOCK_EXPR
1780 && code != SAVE_EXPR
1781 && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
1782 #endif /* INLINER_FOR_JAVA */
1783 {
1784 int i, len;
1785
1786 #ifndef INLINER_FOR_JAVA
1787 /* Set input_line here so we get the right instantiation context
1788 if we call instantiate_decl from inlinable_function_p. */
1789 if (STATEMENT_CODE_P (code) && !STMT_LINENO_FOR_FN_P (*tp))
1790 input_line = STMT_LINENO (*tp);
1791 #endif /* not INLINER_FOR_JAVA */
1792
1793 /* Walk over all the sub-trees of this operand. */
1794 len = first_rtl_op (code);
1795 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
1796 But, we only want to walk once. */
1797 if (code == TARGET_EXPR
1798 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
1799 --len;
1800 /* Go through the subtrees. We need to do this in forward order so
1801 that the scope of a FOR_EXPR is handled properly. */
1802 for (i = 0; i < len; ++i)
1803 WALK_SUBTREE (TREE_OPERAND (*tp, i));
1804
1805 #ifndef INLINER_FOR_JAVA
1806 /* For statements, we also walk the chain so that we cover the
1807 entire statement tree. */
1808 if (STATEMENT_CODE_P (code))
1809 {
1810 if (code == DECL_STMT
1811 && DECL_STMT_DECL (*tp)
1812 && DECL_P (DECL_STMT_DECL (*tp)))
1813 {
1814 /* Walk the DECL_INITIAL, DECL_SIZE, DECL_SIZE_UNIT and type. We don't
1815 want to walk into declarations that are just mentioned, rather than
1816 declared; they don't really belong to this part of the tree.
1817 And, we can see cycles: the initializer for a declaration can
1818 refer to the declaration itself. */
1819 WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
1820 WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
1821 WALK_SUBTREE (DECL_SIZE_UNIT (DECL_STMT_DECL (*tp)));
1822 WALK_SUBTREE (TREE_TYPE (*tp));
1823 }
1824
1825 /* This can be tail-recursion optimized if we write it this way. */
1826 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1827 }
1828
1829 #endif /* not INLINER_FOR_JAVA */
1830 /* We didn't find what we were looking for. */
1831 return NULL_TREE;
1832 }
1833 else if (TREE_CODE_CLASS (code) == 'd')
1834 {
1835 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1836 }
1837 else if (TREE_CODE_CLASS (code) == 't')
1838 {
1839 WALK_SUBTREE (TYPE_SIZE (*tp));
1840 WALK_SUBTREE (TYPE_SIZE_UNIT (*tp));
1841 /* Also examine various special fields, below. */
1842 }
1843
1844 result = (*lang_hooks.tree_inlining.walk_subtrees) (tp, &walk_subtrees, func,
1845 data, htab);
1846 if (result || ! walk_subtrees)
1847 return result;
1848
1849 /* Not one of the easy cases. We must explicitly go through the
1850 children. */
1851 switch (code)
1852 {
1853 case ERROR_MARK:
1854 case IDENTIFIER_NODE:
1855 case INTEGER_CST:
1856 case REAL_CST:
1857 case VECTOR_CST:
1858 case STRING_CST:
1859 case REAL_TYPE:
1860 case COMPLEX_TYPE:
1861 case VECTOR_TYPE:
1862 case VOID_TYPE:
1863 case BOOLEAN_TYPE:
1864 case UNION_TYPE:
1865 case ENUMERAL_TYPE:
1866 case BLOCK:
1867 case RECORD_TYPE:
1868 case CHAR_TYPE:
1869 case PLACEHOLDER_EXPR:
1870 /* None of these have subtrees other than those already walked
1871 above. */
1872 break;
1873
1874 case POINTER_TYPE:
1875 case REFERENCE_TYPE:
1876 WALK_SUBTREE_TAIL (TREE_TYPE (*tp));
1877 break;
1878
1879 case TREE_LIST:
1880 WALK_SUBTREE (TREE_VALUE (*tp));
1881 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
1882 break;
1883
1884 case TREE_VEC:
1885 {
1886 int len = TREE_VEC_LENGTH (*tp);
1887
1888 if (len == 0)
1889 break;
1890
1891 /* Walk all elements but the first. */
1892 while (--len)
1893 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
1894
1895 /* Now walk the first one as a tail call. */
1896 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
1897 }
1898
1899 case COMPLEX_CST:
1900 WALK_SUBTREE (TREE_REALPART (*tp));
1901 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
1902
1903 case CONSTRUCTOR:
1904 WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
1905
1906 case METHOD_TYPE:
1907 WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
1908 /* Fall through. */
1909
1910 case FUNCTION_TYPE:
1911 WALK_SUBTREE (TREE_TYPE (*tp));
1912 {
1913 tree arg = TYPE_ARG_TYPES (*tp);
1914
1915 /* We never want to walk into default arguments. */
1916 for (; arg; arg = TREE_CHAIN (arg))
1917 WALK_SUBTREE (TREE_VALUE (arg));
1918 }
1919 break;
1920
1921 case ARRAY_TYPE:
1922 WALK_SUBTREE (TREE_TYPE (*tp));
1923 WALK_SUBTREE_TAIL (TYPE_DOMAIN (*tp));
1924
1925 case INTEGER_TYPE:
1926 WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
1927 WALK_SUBTREE_TAIL (TYPE_MAX_VALUE (*tp));
1928
1929 case OFFSET_TYPE:
1930 WALK_SUBTREE (TREE_TYPE (*tp));
1931 WALK_SUBTREE_TAIL (TYPE_OFFSET_BASETYPE (*tp));
1932
1933 #ifdef INLINER_FOR_JAVA
1934 case EXIT_BLOCK_EXPR:
1935 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 1));
1936
1937 case SAVE_EXPR:
1938 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
1939 #endif /* INLINER_FOR_JAVA */
1940
1941 default:
1942 abort ();
1943 }
1944
1945 /* We didn't find what we were looking for. */
1946 return NULL_TREE;
1947
1948 #undef WALK_SUBTREE
1949 #undef WALK_SUBTREE_TAIL
1950 }
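
/* Illustrative sketch (editorial addition): a walk_tree_fn that aborts
   the traversal as soon as it sees a CALL_EXPR, and a driver that
   returns it.  Returning the node from the callback is what makes
   walk_tree hand it back to the caller; clearing *WALK_SUBTREES would
   instead only skip the children of the current node.  The example_*
   names are hypothetical.  */

static tree
example_find_call_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                     void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    return *tp;
  return NULL_TREE;
}

static tree
example_find_first_call (tree body)
{
  return walk_tree (&body, example_find_call_r, NULL, NULL);
}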
1951
1952 /* Like walk_tree, but does not walk duplicate nodes more than
1953 once. */
1954
1955 tree
1956 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
1957 {
1958 tree result;
1959 htab_t htab;
1960
1961 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
1962 result = walk_tree (tp, func, data, htab);
1963 htab_delete (htab);
1964 return result;
1965 }
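
/* Illustrative sketch (editorial addition): the duplicate-avoiding
   variant is the convenient choice when the caller has no hash table
   of its own to thread through several walks; a shared subtree (for
   instance the operand of a SAVE_EXPR that occurs twice) is then
   visited only once.  Reuses the hypothetical callback defined above.  */

static tree
example_find_first_call_once (tree body)
{
  return walk_tree_without_duplicates (&body, example_find_call_r, NULL);
}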
1966
1967 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1968
1969 tree
1970 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1971 {
1972 enum tree_code code = TREE_CODE (*tp);
1973
1974 /* We make copies of most nodes. */
1975 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1976 || TREE_CODE_CLASS (code) == 'c'
1977 || code == TREE_LIST
1978 || code == TREE_VEC
1979 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
1980 {
1981 /* Because the chain gets clobbered when we make a copy, we save it
1982 here. */
1983 tree chain = TREE_CHAIN (*tp);
1984
1985 /* Copy the node. */
1986 *tp = copy_node (*tp);
1987
1988 /* Now, restore the chain, if appropriate. That will cause
1989 walk_tree to walk into the chain as well. */
1990 if (code == PARM_DECL || code == TREE_LIST
1991 #ifndef INLINER_FOR_JAVA
1992 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp)
1993 || STATEMENT_CODE_P (code))
1994 TREE_CHAIN (*tp) = chain;
1995
1996 /* For now, we don't update BLOCKs when we make copies. So, we
1997 have to nullify all scope-statements. */
1998 if (TREE_CODE (*tp) == SCOPE_STMT)
1999 SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
2000 #else /* INLINER_FOR_JAVA */
2001 || (*lang_hooks.tree_inlining.tree_chain_matters_p) (*tp))
2002 TREE_CHAIN (*tp) = chain;
2003 #endif /* INLINER_FOR_JAVA */
2004 }
2005 else if (TREE_CODE_CLASS (code) == 't')
2006 *walk_subtrees = 0;
2007
2008 return NULL_TREE;
2009 }
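
/* Illustrative sketch (editorial addition): driving walk_tree with
   copy_tree_r unshares a tree in place, because each visited node is
   replaced by a fresh copy before its children are walked, while type
   nodes and other leaves are left shared.  The example_ name is
   hypothetical; this is only a minimal unsharing helper, not a claim
   about this file's public interface.  */

static tree
example_unshare_expr (tree expr)
{
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}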
2010
2011 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2012 information indicating to what new SAVE_EXPR this one should be
2013 mapped, use that one. Otherwise, create a new node and enter it in
2014 ST. FN is the function into which the copy will be placed. */
2015
2016 void
2017 remap_save_expr (tree *tp, void *st_, tree fn, int *walk_subtrees)
2018 {
2019 splay_tree st = (splay_tree) st_;
2020 splay_tree_node n;
2021
2022 /* See if we already encountered this SAVE_EXPR. */
2023 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2024
2025 /* If we didn't already remap this SAVE_EXPR, do so now. */
2026 if (!n)
2027 {
2028 tree t = copy_node (*tp);
2029
2030 /* The SAVE_EXPR is now part of the function into which we
2031 are inlining this body. */
2032 SAVE_EXPR_CONTEXT (t) = fn;
2033 /* And we haven't evaluated it yet. */
2034 SAVE_EXPR_RTL (t) = NULL_RTX;
2035 /* Remember this SAVE_EXPR. */
2036 n = splay_tree_insert (st,
2037 (splay_tree_key) *tp,
2038 (splay_tree_value) t);
2039 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2040 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2041 }
2042 else
2043 /* We've already walked into this SAVE_EXPR, so we needn't do it
2044 again. */
2045 *walk_subtrees = 0;
2046
2047 /* Replace this SAVE_EXPR with the copy. */
2048 *tp = (tree) n->value;
2049 }
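
/* Illustrative sketch (editorial addition): a walk_tree_fn that applies
   remap_save_expr, so every SAVE_EXPR in a copied body is rewritten to
   a copy owned by the destination function, with the splay tree ST
   guaranteeing that repeated occurrences of one SAVE_EXPR stay shared.
   The example_* name and the use of current_function_decl as the
   destination function are assumptions for illustration.  */

static tree
example_remap_save_exprs_r (tree *tp, int *walk_subtrees, void *st)
{
  if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, current_function_decl, walk_subtrees);
  return NULL_TREE;
}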
2050
2051 #ifdef INLINER_FOR_JAVA
2052 /* Add STMT to EXISTING if possible, otherwise create a new
2053 COMPOUND_EXPR and add STMT to it. */
2054
2055 static tree
2056 add_stmt_to_compound (tree existing, tree type, tree stmt)
2057 {
2058 if (!stmt)
2059 return existing;
2060 else if (existing)
2061 return build (COMPOUND_EXPR, type, existing, stmt);
2062 else
2063 return stmt;
2064 }
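
/* Illustrative sketch (editorial addition, Java inliner only): growing
   a statement list with add_stmt_to_compound.  Starting from a
   NULL_TREE accumulator, the first non-null statement simply becomes
   the accumulator, and later statements are chained with nested
   COMPOUND_EXPRs.  The example_* name is hypothetical.  */

static tree
example_build_compound (tree stmt1, tree stmt2)
{
  tree body = NULL_TREE;

  body = add_stmt_to_compound (body, void_type_node, stmt1);
  body = add_stmt_to_compound (body, void_type_node, stmt2);
  return body;
}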
2065
2066 #endif /* INLINER_FOR_JAVA */