real.h (struct real_format): Split the signbit field into two fields, signbit_ro...
[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "integrate.h"
36 #include "varray.h"
37 #include "hashtab.h"
38 #include "pointer-set.h"
39 #include "splay-tree.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "diagnostic.h"
47 #include "debug.h"
48
49 /* I'm not real happy about this, but we need to handle gimple and
50 non-gimple trees. */
51 #include "tree-iterator.h"
52 #include "tree-gimple.h"
53
54 /* 0 if we should not perform inlining.
55 1 if we should expand function calls inline at the tree level.
56 2 if we should consider *all* functions to be inline
57 candidates. */
58
59 int flag_inline_trees = 0;
60
61 /* To Do:
62
63 o In order to make inlining-on-trees work, we pessimized
64 function-local static constants. In particular, they are now
65 always output, even when not addressed. Fix this by treating
66 function-local static constants just like global static
67 constants; the back-end already knows not to output them if they
68 are not needed.
69
70 o Provide heuristics to clamp inlining of recursive template
71 calls? */
72
73 /* Data required for function inlining.  Shared state threaded through
   the copy_body walk; also used for cloning and body-saving (see
   cloning_p and saving_p below). */
74
75 typedef struct inline_data
76 {
77 /* A stack of the functions we are inlining. For example, if we are
78 compiling `f', which calls `g', which calls `h', and we are
79 inlining the body of `h', the stack will contain `h', followed
80 by `g', followed by `f'. The first few elements of the stack may
81 contain other functions that we know we should not recurse into,
82 even though they are not directly being inlined. */
83 varray_type fns;
84 /* The index of the first element of FNS that really represents an
85 inlined function. */
86 unsigned first_inlined_fn;
87 /* The label to jump to when a return statement is encountered. If
88 this value is NULL, then return statements will simply be
89 remapped as return statements, rather than as jumps. */
90 tree ret_label;
91 /* The VAR_DECL for the return value. */
92 tree retvar;
93 /* The map from local declarations in the inlined function to
94 equivalents in the function into which it is being inlined. */
95 splay_tree decl_map;
96 /* Nonzero if we are currently within the cleanup for a
97 TARGET_EXPR. */
98 int in_target_cleanup_p;
99 /* We use the same mechanism to build clones that we do to perform
100 inlining. However, there are a few places where we need to
101 distinguish between those two situations. This flag is true if
102 we are cloning, rather than inlining. */
103 bool cloning_p;
104 /* Similarly, true if we are saving the function body rather than
105 inlining. */
105 bool saving_p;
106 /* Hash table used to prevent walk_tree from visiting the same node
107 umpteen million times. */
108 htab_t tree_pruner;
109 /* Callgraph node of function we are inlining into. */
110 struct cgraph_node *node;
111 /* Callgraph node of currently inlined function. */
112 struct cgraph_node *current_node;
113 /* Statement iterator. We need this so we can keep the tree in
114 gimple form when we insert the inlined function. It is not
115 used when we are not dealing with gimple trees. */
116 tree_stmt_iterator tsi;
117 } inline_data;
118
119 /* Prototypes. */
120
121 /* The approximate number of instructions per statement. This number
122 need not be particularly accurate; it is used only to make
123 decisions about when a function is too big to inline. */
124 #define INSNS_PER_STMT (10)
125
126 static tree copy_body_r (tree *, int *, void *);
127 static tree copy_body (inline_data *);
128 static tree expand_call_inline (tree *, int *, void *);
129 static void expand_calls_inline (tree *, inline_data *);
130 static bool inlinable_function_p (tree);
131 static tree remap_decl (tree, inline_data *);
132 static tree remap_type (tree, inline_data *);
133 static tree initialize_inlined_parameters (inline_data *, tree,
134 tree, tree, tree);
135 static void remap_block (tree *, inline_data *);
136 static tree remap_decls (tree, inline_data *);
137 static void copy_bind_expr (tree *, int *, inline_data *);
138 static tree mark_local_for_remap_r (tree *, int *, void *);
139 static void unsave_expr_1 (tree);
140 static tree unsave_r (tree *, int *, void *);
141 static void declare_inline_vars (tree bind_expr, tree vars);
142 static void remap_save_expr (tree *, void *, int *);
143
144 /* Insert a tree->tree mapping for ID. Despite the name suggests
145 that the trees should be variables, it is used for more than that. */
146
147 static void
148 insert_decl_map (inline_data *id, tree key, tree value)
149 {
150 splay_tree_insert (id->decl_map, (splay_tree_key) key,
151 (splay_tree_value) value);
152
153 /* Always insert an identity map as well. If we see this same new
154 node again, we won't want to duplicate it a second time. */
155 if (key != value)
156 splay_tree_insert (id->decl_map, (splay_tree_key) value,
157 (splay_tree_value) value);
158 }
159
160 /* Remap DECL during the copying of the BLOCK tree for the function.
161 We are only called to remap local variables in the current function.
   Returns the (unshared) equivalent declaration, creating and
   registering one on first sight. */
162
163 static tree
164 remap_decl (tree decl, inline_data *id)
165 {
166 splay_tree_node n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
167 tree fn = VARRAY_TOP_TREE (id->fns);
168
169 /* See if we have remapped this declaration. If we didn't already have an
170 equivalent for this declaration, create one now. */
171 if (!n)
172 {
173 /* Make a copy of the variable or label. */
174 tree t = copy_decl_for_inlining (decl, fn, VARRAY_TREE (id->fns, 0));
175
176 /* Remap types, if necessary. */
177 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
178 if (TREE_CODE (t) == TYPE_DECL)
179 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
180 else if (TREE_CODE (t) == PARM_DECL)
181 DECL_ARG_TYPE_AS_WRITTEN (t)
182 = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);
183
184 /* Remap sizes as necessary. */
185 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
186 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
187
188 /* If fields, do likewise for offset and qualifier. */
189 if (TREE_CODE (t) == FIELD_DECL)
190 {
191 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
192 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
193 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
194 }
195
196 #if 0
197 /* FIXME handle anon aggrs. */
198 if (! DECL_NAME (t) && TREE_TYPE (t)
199 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
200 {
201 /* For a VAR_DECL of anonymous type, we must also copy the
202 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
203 tree members = NULL;
204 tree src;
205
206 for (src = DECL_ANON_UNION_ELEMS (t); src;
207 src = TREE_CHAIN (src))
208 {
209 tree member = remap_decl (TREE_VALUE (src), id);
210
211 gcc_assert (!TREE_PURPOSE (src));
212 members = tree_cons (NULL, member, members);
213 }
214 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
215 }
216 #endif
217
218 /* Remember it, so that if we encounter this local entity
219 again we can reuse this copy. */
220 insert_decl_map (id, decl, t);
221 return t;
222 }
223
   /* Already remapped: return an unshared copy of the recorded
      equivalent. */
224 return unshare_expr ((tree) n->value);
225 }
226
/* Remap TYPE through ID's decl_map.  Only a type that is variably
   modified by a variable of the function being inlined needs a real
   copy; anything else gets an identity mapping recorded and is
   returned unchanged.  Copied types get fresh variant chains and
   lazily rebuilt pointer/reference types.  */

227 static tree
228 remap_type (tree type, inline_data *id)
229 {
230 splay_tree_node node;
231 tree new, t;
232
233 if (type == NULL)
234 return type;
235
236 /* See if we have remapped this type. */
237 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
238 if (node)
239 return (tree) node->value;
240
241 /* The type only needs remapping if it's variably modified by a variable
242 in the function we are inlining. */
243 if (! variably_modified_type_p (type, VARRAY_TOP_TREE (id->fns)))
244 {
245 insert_decl_map (id, type, type);
246 return type;
247 }
248
249 /* We do need a copy. Build and register it now. If this is a pointer or
250 reference type, remap the designated type and make a new pointer or
251 reference type. */
252 if (TREE_CODE (type) == POINTER_TYPE)
253 {
254 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
255 TYPE_MODE (type),
256 TYPE_REF_CAN_ALIAS_ALL (type));
257 insert_decl_map (id, type, new);
258 return new;
259 }
260 else if (TREE_CODE (type) == REFERENCE_TYPE)
261 {
262 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
263 TYPE_MODE (type),
264 TYPE_REF_CAN_ALIAS_ALL (type));
265 insert_decl_map (id, type, new);
266 return new;
267 }
268 else
269 new = copy_node (type);
270
271 insert_decl_map (id, type, new);
272
273 /* This is a new type, not a copy of an old type. Need to reassociate
274 variants. We can handle everything except the main variant lazily. */
275 t = TYPE_MAIN_VARIANT (type);
276 if (type != t)
277 {
278 t = remap_type (t, id);
279 TYPE_MAIN_VARIANT (new) = t;
280 TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
281 TYPE_NEXT_VARIANT (t) = new;
282 }
283 else
284 {
285 TYPE_MAIN_VARIANT (new) = new;
286 TYPE_NEXT_VARIANT (new) = NULL;
287 }
288
289 /* Lazily create pointer and reference types. */
290 TYPE_POINTER_TO (new) = NULL;
291 TYPE_REFERENCE_TO (new) = NULL;
292
293 switch (TREE_CODE (new))
294 {
295 case INTEGER_TYPE:
296 case REAL_TYPE:
297 case ENUMERAL_TYPE:
298 case BOOLEAN_TYPE:
299 case CHAR_TYPE:
300 t = TYPE_MIN_VALUE (new);
301 if (t && TREE_CODE (t) != INTEGER_CST)
302 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
303
304 t = TYPE_MAX_VALUE (new);
305 if (t && TREE_CODE (t) != INTEGER_CST)
306 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
307 return new;
308
309 case FUNCTION_TYPE:
310 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
311 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
312 return new;
313
314 case ARRAY_TYPE:
315 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
316 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
317 break;
318
319 case RECORD_TYPE:
320 case UNION_TYPE:
321 case QUAL_UNION_TYPE:
322 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
323 break;
324
325 case FILE_TYPE:
326 case OFFSET_TYPE:
327 default:
328 /* Shouldn't have been thought variable sized. */
329 gcc_unreachable ();
330 }
331
332 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
333 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
334
335 return new;
336 }
337
338 static tree
339 remap_decls (tree decls, inline_data *id)
340 {
341 tree old_var;
342 tree new_decls = NULL_TREE;
343
344 /* Remap its variables. */
345 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
346 {
347 tree new_var;
348
349 /* Remap the variable. */
350 new_var = remap_decl (old_var, id);
351
352 /* If we didn't remap this variable, so we can't mess with its
353 TREE_CHAIN. If we remapped this variable to the return slot, it's
354 already declared somewhere else, so don't declare it here. */
355 if (!new_var || new_var == id->retvar)
356 ;
357 else
358 {
359 gcc_assert (DECL_P (new_var));
360 TREE_CHAIN (new_var) = new_decls;
361 new_decls = new_var;
362 }
363 }
364
365 return nreverse (new_decls);
366 }
367
368 /* Copy the BLOCK to contain remapped versions of the variables
369 therein, and hook the new block into the block tree of the function
   being inlined into. */
370
371 static void
372 remap_block (tree *block, inline_data *id)
373 {
374 tree old_block;
375 tree new_block;
376 tree fn;
377
378 /* Make the new block. */
379 old_block = *block;
380 new_block = make_node (BLOCK);
381 TREE_USED (new_block) = TREE_USED (old_block);
382 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
383 *block = new_block;
384
385 /* Remap its variables. */
386 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
387
388 fn = VARRAY_TREE (id->fns, 0);
389 #if 1
390 /* FIXME! It shouldn't be so hard to manage blocks. Rebuilding them in
391 rest_of_compilation is a good start. */
392 if (id->cloning_p)
393 /* We're building a clone; DECL_INITIAL is still
394 error_mark_node, and current_binding_level is the parm
395 binding level. */
396 lang_hooks.decls.insert_block (new_block);
397 else
398 {
399 /* Attach this new block after the DECL_INITIAL block for the
400 function into which this block is being inlined. In
401 rest_of_compilation we will straighten out the BLOCK tree. */
402 tree *first_block;
403 if (DECL_INITIAL (fn))
404 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
405 else
406 first_block = &DECL_INITIAL (fn);
407 BLOCK_CHAIN (new_block) = *first_block;
408 *first_block = new_block;
409 }
410 #endif
411 /* Remember the remapped block. */
412 insert_decl_map (id, old_block, new_block);
413 }
414
415 static void
416 copy_statement_list (tree *tp)
417 {
418 tree_stmt_iterator oi, ni;
419 tree new;
420
421 new = alloc_stmt_list ();
422 ni = tsi_start (new);
423 oi = tsi_start (*tp);
424 *tp = new;
425
426 for (; !tsi_end_p (oi); tsi_next (&oi))
427 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
428 }
429
430 static void
431 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
432 {
433 tree block = BIND_EXPR_BLOCK (*tp);
434 /* Copy (and replace) the statement. */
435 copy_tree_r (tp, walk_subtrees, NULL);
436 if (block)
437 {
438 remap_block (&block, id);
439 BIND_EXPR_BLOCK (*tp) = block;
440 }
441
442 if (BIND_EXPR_VARS (*tp))
443 /* This will remap a lot of the same decls again, but this should be
444 harmless. */
445 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
446 }
447
448 /* Called from copy_body via walk_tree. DATA is really an `inline_data *'.
   Copies or remaps the node at *TP as part of duplicating a function
   body: RETURN_EXPRs become an assignment plus a GOTO to
   id->ret_label, local declarations and variably-modified types are
   remapped, and call-graph edges are updated for copied
   CALL_EXPRs. */
449
450 static tree
451 copy_body_r (tree *tp, int *walk_subtrees, void *data)
452 {
453 inline_data *id = (inline_data *) data;
454 tree fn = VARRAY_TOP_TREE (id->fns);
455
456 #if 0
457 /* All automatic variables should have a DECL_CONTEXT indicating
458 what function they come from. */
459 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
460 && DECL_NAMESPACE_SCOPE_P (*tp))
461 gcc_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp));
462 #endif
463
464 /* If this is a RETURN_EXPR, change it into a MODIFY_EXPR and a
465 GOTO_EXPR with the RET_LABEL as its target. */
466 if (TREE_CODE (*tp) == RETURN_EXPR && id->ret_label)
467 {
468 tree return_stmt = *tp;
469 tree goto_stmt;
470
471 /* Build the GOTO_EXPR. */
472 tree assignment = TREE_OPERAND (return_stmt, 0);
473 goto_stmt = build1 (GOTO_EXPR, void_type_node, id->ret_label);
474 TREE_USED (id->ret_label) = 1;
475
476 /* If we're returning something, just turn that into an
477 assignment into the equivalent of the original
478 RESULT_DECL. */
479 if (assignment)
480 {
481 /* Do not create a statement containing a naked RESULT_DECL. */
482 if (TREE_CODE (assignment) == RESULT_DECL)
483 gimplify_stmt (&assignment);
484
485 *tp = build (BIND_EXPR, void_type_node, NULL, NULL, NULL);
486 append_to_statement_list (assignment, &BIND_EXPR_BODY (*tp));
487 append_to_statement_list (goto_stmt, &BIND_EXPR_BODY (*tp));
488 }
489 /* If we're not returning anything just do the jump. */
490 else
491 *tp = goto_stmt;
492 }
493 /* Local variables and labels need to be replaced by equivalent
494 variables. We don't want to copy static variables; there's only
495 one of those, no matter how many times we inline the containing
496 function. Similarly for globals from an outer function. */
497 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
498 {
499 tree new_decl;
500
501 /* Remap the declaration. */
502 new_decl = remap_decl (*tp, id);
503 gcc_assert (new_decl);
504 /* Replace this variable with the copy. */
505 STRIP_TYPE_NOPS (new_decl);
506 *tp = new_decl;
507 *walk_subtrees = 0;
508 }
509 else if (TREE_CODE (*tp) == STATEMENT_LIST)
510 copy_statement_list (tp);
511 else if (TREE_CODE (*tp) == SAVE_EXPR)
512 remap_save_expr (tp, id->decl_map, walk_subtrees);
513 else if (TREE_CODE (*tp) == BIND_EXPR)
514 copy_bind_expr (tp, walk_subtrees, id);
515 /* Types may need remapping as well. */
516 else if (TYPE_P (*tp))
517 *tp = remap_type (*tp, id);
518
519 /* If this is a constant, we have to copy the node iff the type will be
520 remapped. copy_tree_r will not copy a constant. */
521 else if (TREE_CODE_CLASS (TREE_CODE (*tp)) == tcc_constant)
522 {
523 tree new_type = remap_type (TREE_TYPE (*tp), id);
524
525 if (new_type == TREE_TYPE (*tp))
526 *walk_subtrees = 0;
527
528 else if (TREE_CODE (*tp) == INTEGER_CST)
529 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
530 TREE_INT_CST_HIGH (*tp));
531 else
532 {
533 *tp = copy_node (*tp);
534 TREE_TYPE (*tp) = new_type;
535 }
536 }
537
538 /* Otherwise, just copy the node. Note that copy_tree_r already
539 knows not to copy VAR_DECLs, etc., so this is safe. */
540 else
541 {
542 tree old_node = *tp;
543
544 if (TREE_CODE (*tp) == MODIFY_EXPR
545 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
546 && (lang_hooks.tree_inlining.auto_var_in_fn_p
547 (TREE_OPERAND (*tp, 0), fn)))
548 {
549 /* Some assignments VAR = VAR; don't generate any rtl code
550 and thus don't count as variable modification. Avoid
551 keeping bogosities like 0 = 0. */
552 tree decl = TREE_OPERAND (*tp, 0), value;
553 splay_tree_node n;
554
555 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
556 if (n)
557 {
558 value = (tree) n->value;
559 STRIP_TYPE_NOPS (value);
560 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
561 {
562 *tp = build_empty_stmt ();
563 return copy_body_r (tp, walk_subtrees, data);
564 }
565 }
566 }
567 else if (TREE_CODE (*tp) == INDIRECT_REF)
568 {
569 /* Get rid of *& from inline substitutions that can happen when a
570 pointer argument is an ADDR_EXPR. */
571 tree decl = TREE_OPERAND (*tp, 0), value;
572 splay_tree_node n;
573
574 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
575 if (n)
576 {
577 value = (tree) n->value;
578 STRIP_NOPS (value);
579 if (TREE_CODE (value) == ADDR_EXPR
580 && (lang_hooks.types_compatible_p
581 (TREE_TYPE (*tp), TREE_TYPE (TREE_OPERAND (value, 0)))))
582 {
583 *tp = TREE_OPERAND (value, 0);
584 return copy_body_r (tp, walk_subtrees, data);
585 }
586 }
587 }
588
589 copy_tree_r (tp, walk_subtrees, NULL);
590
   /* Keep the call graph in step with the copied calls: when saving
      a body, point the clones' edges at the new CALL_EXPR; when
      inlining, clone the edge for the inlined copy. */
591 if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
592 {
593 if (id->saving_p)
594 {
595 struct cgraph_node *node;
596 struct cgraph_edge *edge;
597
598 for (node = id->node->next_clone; node; node = node->next_clone)
599 {
600 edge = cgraph_edge (node, old_node);
601 gcc_assert (edge);
602 edge->call_expr = *tp;
603 }
604 }
605 else
606 {
607 struct cgraph_edge *edge
608 = cgraph_edge (id->current_node, old_node);
609
610 if (edge)
611 cgraph_clone_edge (edge, id->node, *tp);
612 }
613 }
614
615 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
616
617 /* The copied TARGET_EXPR has never been expanded, even if the
618 original node was expanded already. */
619 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
620 {
621 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
622 TREE_OPERAND (*tp, 3) = NULL_TREE;
623 }
624
625 /* Variable substitution need not be simple. In particular, the
626 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
627 and friends are up-to-date. */
628 else if (TREE_CODE (*tp) == ADDR_EXPR)
629 {
630 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
631 recompute_tree_invarant_for_addr_expr (*tp);
632 *walk_subtrees = 0;
633 }
634 }
635
636 /* Keep iterating. */
637 return NULL_TREE;
638 }
639
640 /* Make a copy of the body of FN so that it can be inserted inline in
641 another function. */
642
643 static tree
644 copy_body (inline_data *id)
645 {
646 tree body;
647 tree fndecl = VARRAY_TOP_TREE (id->fns);
648
649 if (fndecl == current_function_decl
650 && cfun->saved_tree)
651 body = cfun->saved_tree;
652 else
653 body = DECL_SAVED_TREE (fndecl);
654 walk_tree (&body, copy_body_r, id, NULL);
655
656 return body;
657 }
658
659 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
660 defined in function FN, or of a data member thereof. */
661
662 static bool
663 self_inlining_addr_expr (tree value, tree fn)
664 {
665 tree var;
666
667 if (TREE_CODE (value) != ADDR_EXPR)
668 return false;
669
670 var = get_base_address (TREE_OPERAND (value, 0));
671
672 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
673 }
674
/* Set up one inlined parameter P whose actual argument is VALUE.  FN
   is the function being inlined.  When substitution is unsafe, a new
   VAR_DECL replacing P is chained onto *VARS and its initialization
   is appended to *INIT_STMTS; *GIMPLIFY_INIT_STMTS_P is set when the
   generated initializer is not yet in gimple form.  */

675 static void
676 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
677 tree *init_stmts, tree *vars, bool *gimplify_init_stmts_p)
678 {
679 tree init_stmt;
680 tree var;
681
682 /* If the parameter is never assigned to, we may not need to
683 create a new variable here at all. Instead, we may be able
684 to just use the argument value. */
685 if (TREE_READONLY (p)
686 && !TREE_ADDRESSABLE (p)
687 && value && !TREE_SIDE_EFFECTS (value))
688 {
689 /* We can't risk substituting complex expressions. They
690 might contain variables that will be assigned to later.
691 Theoretically, we could check the expression to see if
692 all of the variables that determine its value are
693 read-only, but we don't bother. */
694 /* We may produce non-gimple trees by adding NOPs or introduce
695 invalid sharing when operand is not really constant.
696 It is not a big deal to prohibit constant propagation here as
697 we will constant propagate in DOM1 pass anyway. */
698 if (is_gimple_min_invariant (value)
699 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
700 /* We have to be very careful about ADDR_EXPR. Make sure
701 the base variable isn't a local variable of the inlined
702 function, e.g., when doing recursive inlining, direct or
703 mutually-recursive or whatever, which is why we don't
704 just test whether fn == current_function_decl. */
705 && ! self_inlining_addr_expr (value, fn))
706 {
707 insert_decl_map (id, p, value);
708 return;
709 }
710 }
711
712 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
713 here since the type of this decl must be visible to the calling
714 function. */
715 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
716
717 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
718 that way, when the PARM_DECL is encountered, it will be
719 automatically replaced by the VAR_DECL. */
720 insert_decl_map (id, p, var);
721
722 /* Declare this new variable. */
723 TREE_CHAIN (var) = *vars;
724 *vars = var;
725
726 /* Make gimplifier happy about this variable. */
727 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
728
729 /* Even if P was TREE_READONLY, the new VAR should not be.
730 In the original code, we would have constructed a
731 temporary, and then the function body would have never
732 changed the value of P. However, now, we will be
733 constructing VAR directly. The constructor body may
734 change its value multiple times as it is being
735 constructed. Therefore, it must not be TREE_READONLY;
736 the back-end assumes that TREE_READONLY variable is
737 assigned to only once. */
738 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
739 TREE_READONLY (var) = 0;
740
741 /* Initialize this VAR_DECL from the equivalent argument. Convert
742 the argument to the proper type in case it was promoted. */
743 if (value)
744 {
745 tree rhs = fold_convert (TREE_TYPE (var), value);
746
747 if (rhs == error_mark_node)
748 return;
749
750 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
751 keep our trees in gimple form. */
752 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
753 append_to_statement_list (init_stmt, init_stmts);
754
755 /* If we did not create a gimple value and we did not create a gimple
756 cast of a gimple value, then we will need to gimplify INIT_STMTS
757 at the end. Note that is_gimple_cast only checks the outer
758 tree code, not its operand. Thus the explicit check that its
759 operand is a gimple value. */
760 if (!is_gimple_val (rhs)
761 && (!is_gimple_cast (rhs)
762 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
763 *gimplify_init_stmts_p = true;
764 }
765 }
766
767 /* Generate code to initialize the parameters of the function at the
768 top of the stack in ID from the ARGS (presented as a TREE_LIST).
   Also handles excess trailing arguments (evaluated for side
   effects) and the static chain.  The new VAR_DECLs are declared in
   BIND_EXPR; the initialization statements are returned.  */
769
770 static tree
771 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
772 tree fn, tree bind_expr)
773 {
774 tree init_stmts = NULL_TREE;
775 tree parms;
776 tree a;
777 tree p;
778 tree vars = NULL_TREE;
779 bool gimplify_init_stmts_p = false;
780 int argnum = 0;
781
782 /* Figure out what the parameters are. */
783 parms = DECL_ARGUMENTS (fn);
784 if (fn == current_function_decl)
785 parms = cfun->saved_args;
786
787 /* Loop through the parameter declarations, replacing each with an
788 equivalent VAR_DECL, appropriately initialized. */
789 for (p = parms, a = args; p;
790 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
791 {
792 tree value;
793
794 ++argnum;
795
796 /* Find the initializer. */
797 value = lang_hooks.tree_inlining.convert_parm_for_inlining
798 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
799
800 setup_one_parameter (id, p, value, fn, &init_stmts, &vars,
801 &gimplify_init_stmts_p);
802 }
803
804 /* Evaluate trailing arguments. */
805 for (; a; a = TREE_CHAIN (a))
806 {
807 tree value = TREE_VALUE (a);
808 append_to_statement_list (value, &init_stmts);
809 }
810
811 /* Initialize the static chain. */
812 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
813 if (fn == current_function_decl)
814 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
815 if (p)
816 {
817 /* No static chain? Seems like a bug in tree-nested.c. */
818 gcc_assert (static_chain);
819
820 setup_one_parameter (id, p, static_chain, fn, &init_stmts, &vars,
821 &gimplify_init_stmts_p);
822 }
823
   /* setup_one_parameter may have produced non-gimple initializers;
      bring the whole list back into gimple form if so. */
824 if (gimplify_init_stmts_p)
825 gimplify_body (&init_stmts, current_function_decl, false);
826
827 declare_inline_vars (bind_expr, vars);
828 return init_stmts;
829 }
830
831 /* Declare a return variable to replace the RESULT_DECL for the function we
832 are calling. RETURN_SLOT_ADDR, if non-null, was a fake parameter that
833 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
834 the MODIFY_EXPR to which this call is the RHS.
835
836 The return value is a (possibly null) value that is the result of the
837 function as seen by the callee. *USE_P is a (possibly null) value that
838 holds the result as seen by the caller. */
839
840 static tree
841 declare_return_variable (inline_data *id, tree return_slot_addr,
842 tree modify_dest, tree *use_p)
843 {
844 tree callee = VARRAY_TOP_TREE (id->fns);
845 tree caller = VARRAY_TREE (id->fns, 0);
846 tree result = DECL_RESULT (callee);
   /* NOTE(review): RESULT is dereferenced here before the !result check
      below; this assumes DECL_RESULT is always non-null at this point
      -- confirm.  */
847 tree callee_type = TREE_TYPE (result);
848 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
849 tree var, use;
850
851 /* We don't need to do anything for functions that don't return
852 anything. */
853 if (!result || VOID_TYPE_P (callee_type))
854 {
855 *use_p = NULL_TREE;
856 return NULL_TREE;
857 }
858
859 /* If there was a return slot, then the return value is the
860 dereferenced address of that object. */
861 if (return_slot_addr)
862 {
863 /* The front end shouldn't have used both return_slot_addr and
864 a modify expression. */
865 gcc_assert (!modify_dest);
866 if (DECL_BY_REFERENCE (result))
867 var = return_slot_addr;
868 else
869 var = build_fold_indirect_ref (return_slot_addr);
870 use = NULL;
871 goto done;
872 }
873
874 /* All types requiring non-trivial constructors should have been handled. */
875 gcc_assert (!TREE_ADDRESSABLE (callee_type));
876
877 /* Attempt to avoid creating a new temporary variable. */
878 if (modify_dest)
879 {
880 bool use_it = false;
881
882 /* We can't use MODIFY_DEST if there's type promotion involved. */
883 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
884 use_it = false;
885
886 /* ??? If we're assigning to a variable sized type, then we must
887 reuse the destination variable, because we've no good way to
888 create variable sized temporaries at this point. */
889 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
890 use_it = true;
891
892 /* If the callee cannot possibly modify MODIFY_DEST, then we can
893 reuse it as the result of the call directly. Don't do this if
894 it would promote MODIFY_DEST to addressable. */
895 else if (!TREE_STATIC (modify_dest)
896 && !TREE_ADDRESSABLE (modify_dest)
897 && !TREE_ADDRESSABLE (result))
898 use_it = true;
899
900 if (use_it)
901 {
902 var = modify_dest;
903 use = NULL;
904 goto done;
905 }
906 }
907
908 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
909
   /* Fall back to a fresh temporary, recorded in the caller's
      unexpanded variable list. */
910 var = copy_decl_for_inlining (result, callee, caller);
911 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
912 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
913 = tree_cons (NULL_TREE, var,
914 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
915
916 /* Do not have the rest of GCC warn about this variable as it should
917 not be visible to the user. */
918 TREE_NO_WARNING (var) = 1;
919
920 /* Build the use expr. If the return type of the function was
921 promoted, convert it back to the expected type. */
922 use = var;
923 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
924 use = fold_convert (caller_type, var);
925
926 done:
927 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
928 way, when the RESULT_DECL is encountered, it will be
929 automatically replaced by the VAR_DECL. */
930 insert_decl_map (id, result, var);
931
932 /* Remember this so we can ignore it in remap_decls. */
933 id->retvar = var;
934
935 *use_p = use;
936 return var;
937 }
938
939 /* Returns true if FN can be inlined as a tree; a thin public wrapper
   around the static predicate inlinable_function_p. */
940
941 bool
942 tree_inlinable_function_p (tree fn)
943 {
944 return inlinable_function_p (fn);
945 }
946
/* When a function is found to be non-inlinable, inline_forbidden_p_1
   records here the N_()-wrapped diagnostic format string (taking the
   function via %J/%qF) explaining why. */
947 static const char *inline_forbidden_reason;
948
/* Callback for walk_tree, used by inline_forbidden_p.  NODEP points at
   the subtree currently being examined; FNP is the FUNCTION_DECL whose
   body is being scanned.  If the node makes FNP uninlinable, record a
   translatable explanation in inline_forbidden_reason and return the
   offending node (which aborts the walk); otherwise return NULL_TREE
   to continue walking.  */

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline alloca call unless user explicitly forced so as
	 this may change program's memory overhead drastically when the
	 function using alloca is called in loop.  In GCC present in
	 SPEC2000 inlining into schedule_block cause it to require 2GB of
	 RAM instead of 256MB.  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      /* An indirect call: nothing more we can check here.  */
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  default:
	    break;
	  }
      break;

    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined, and we abort in find_function_data.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }
      /* FALLTHRU -- falling into the default case, which only breaks,
	 is harmless here.  */

    default:
      break;
    }

  return NULL_TREE;
}
1082
1083 /* Return subexpression representing possible alloca call, if any. */
1084 static tree
1085 inline_forbidden_p (tree fndecl)
1086 {
1087 location_t saved_loc = input_location;
1088 tree ret = walk_tree_without_duplicates (&DECL_SAVED_TREE (fndecl),
1089 inline_forbidden_p_1, fndecl);
1090
1091 input_location = saved_loc;
1092 return ret;
1093 }
1094
1095 /* Returns nonzero if FN is a function that does not have any
1096 fundamental inline blocking properties. */
1097
static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  The verdict is cached in
     DECL_UNINLINABLE at the bottom of this function.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
     here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  /* Only scan the body for blocking constructs once the cheaper checks
     above have passed; inline_forbidden_p walks the whole body.  */
  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      /* always_inline that cannot be honored is a hard error ("sorry"),
	 not merely a warning.  */
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn, fn);
      else if (do_warning)
	warning (inline_forbidden_reason, fn, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
1167
1168 /* Used by estimate_num_insns. Estimate number of instructions seen
1169 by given statement. */
1170
/* Callback for walk_tree, used by estimate_num_insns.  DATA points at
   the running instruction count, which is incremented by a rough cost
   for the node at *TP.  Always returns NULL so the walk continues,
   except where noted below.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = data;
  tree x = *tp;

  /* Types and declarations cost nothing and need not be walked into.  */
  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count nothing.  These should
     be majorized by amount of operations among them we count later
     and are common target of CSE and similar optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by operations that do use them.  We may re-consider this decision once
       we are able to optimize the tree before estimating its size and break
       out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Recognize assignments of large structures and constructors of
       big arrays.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Cost the assignment by the size of its destination.  */
      x = TREE_OPERAND (x, 0);
      /* FALLTHRU */
    case TARGET_EXPR:
    case CONSTRUCTOR:
      {
	HOST_WIDE_INT size;

	size = int_size_in_bytes (TREE_TYPE (x));

	/* Unknown (negative) or very large sizes get a flat penalty;
	   otherwise estimate the number of move-sized pieces needed.  */
	if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
	  *count += 10;
	else
	  *count += ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
      }
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;
    case CALL_EXPR:
      {
	tree decl = get_callee_fndecl (x);

	/* Some builtins vanish entirely during expansion; give them
	   zero cost and, for constant_p, skip their arguments too.  */
	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (decl))
	    {
	    case BUILT_IN_CONSTANT_P:
	      *walk_subtrees = 0;
	      return NULL_TREE;
	    case BUILT_IN_EXPECT:
	      return NULL_TREE;
	    default:
	      break;
	    }
	*count += 10;
	break;
      }
    default:
      /* Abort here so we know we don't miss any nodes.  */
      gcc_unreachable ();
    }
  return NULL;
}
1375
1376 /* Estimate number of instructions that will be created by expanding EXPR. */
1377
1378 int
1379 estimate_num_insns (tree expr)
1380 {
1381 int num = 0;
1382 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1383 return num;
1384 }
1385
1386 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1387
/* Callback for walk_tree (via expand_calls_inline).  If *TP is a
   CALL_EXPR that can and should be inlined, splice the callee's body
   into the statement stream at ID->tsi and replace the call with the
   return-value variable (or an empty statement).  DATA is the
   inline_data context.  Always returns NULL_TREE so iteration
   continues.  */

static tree
expand_call_inline (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id;
  tree t;
  tree expr;
  tree stmt;
  tree use_retvar;
  tree fn;
  tree arg_inits;
  tree *inlined_body;
  splay_tree st;
  tree args;
  tree return_slot_addr;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *edge;
  const char *reason;

  /* See what we've got.  */
  id = (inline_data *) data;
  t = *tp;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* Recurse, but letting recursive invocations know that we are
     inside the body of a TARGET_EXPR.  */
  if (TREE_CODE (*tp) == TARGET_EXPR)
    {
#if 0
      int i, len = TREE_CODE_LENGTH (TARGET_EXPR);

      /* We're walking our own subtrees.  */
      *walk_subtrees = 0;

      /* Actually walk over them.  This loop is the body of
	 walk_trees, omitting the case where the TARGET_EXPR
	 itself is handled.  */
      for (i = 0; i < len; ++i)
	{
	  if (i == 2)
	    ++id->in_target_cleanup_p;
	  walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
		     id->tree_pruner);
	  if (i == 2)
	    --id->in_target_cleanup_p;
	}

      goto egress;
#endif
    }

  if (TYPE_P (t))
    /* Because types were not copied in copy_body, CALL_EXPRs beneath
       them should not be expanded.  This can happen if the type is a
       dynamic array type, for example.  */
    *walk_subtrees = 0;

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (TREE_CODE (t) != CALL_EXPR)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = get_callee_fndecl (t);
  if (!fn)
    goto egress;

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If fn is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     DECL_SAVED_TREE.  */
  if (! DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective C and fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->current_node->analyzed)
    goto egress;

  edge = cgraph_edge (id->current_node, t);

  /* Constant propagation on argument done during previous inlining
     may create new direct call.  Produce an edge for it.  */
  if (!edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have missing edge in the callgraph.  This can happen in one case
	 where previous inlining turned indirect call into direct call by
	 constant propagating arguments.  In all other cases we hit a bug
	 (incorrect node sharing is the most common reason for missing
	 edges).  */
      gcc_assert (dest->needed || !flag_unit_at_a_time);
      cgraph_create_edge (id->node, dest, t)->inline_failed
	= N_("originally indirect function call not considered for inlining");
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (edge, &reason))
    {
      /* A failed always_inline is a hard error; otherwise warn once
	 for user-declared inline functions outside system headers.  */
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
	  sorry ("called from here");
	}
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && strlen (reason)
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
	{
	  warning ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
	  warning ("called from here");
	}
      goto egress;
    }

#ifdef ENABLE_CHECKING
  if (edge->callee->decl != id->node->decl)
    verify_cgraph_node (edge->callee);
#endif

  if (! lang_hooks.tree_inlining.start_inlining (fn))
    goto egress;

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  stmt = NULL;
  expr = build (BIND_EXPR, void_type_node, NULL_TREE,
		stmt, make_node (BLOCK));
  BLOCK_ABSTRACT_ORIGIN (BIND_EXPR_BLOCK (expr)) = fn;

  /* Local declarations will be replaced by their equivalents in this
     map.  The previous map is restored near the end of this function.  */
  st = id->decl_map;
  id->decl_map = splay_tree_new (splay_tree_compare_pointers,
				 NULL, NULL);

  /* Initialize the parameters.  */
  args = TREE_OPERAND (t, 1);
  return_slot_addr = NULL_TREE;
  if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
    {
      /* The first "argument" is really the address of the return slot.  */
      return_slot_addr = TREE_VALUE (args);
      args = TREE_CHAIN (args);
      TREE_TYPE (expr) = void_type_node;
    }

  arg_inits = initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2),
					     fn, expr);
  if (arg_inits)
    {
      /* Expand any inlined calls in the initializers.  Do this before we
	 push FN on the stack of functions we are inlining; we want to
	 inline calls to FN that appear in the initializers for the
	 parameters.

	 Note we need to save and restore the saved tree statement iterator
	 to avoid having it clobbered by expand_calls_inline.  */
      tree_stmt_iterator save_tsi;

      save_tsi = id->tsi;
      expand_calls_inline (&arg_inits, id);
      id->tsi = save_tsi;

      /* And add them to the tree.  */
      append_to_statement_list (arg_inits, &BIND_EXPR_BODY (expr));
    }

  /* Record the function we are about to inline so that we can avoid
     recursing into it.  */
  VARRAY_PUSH_TREE (id->fns, fn);

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
  DECL_ARTIFICIAL (id->ret_label) = 1;
  DECL_IGNORED_P (id->ret_label) = 1;
  DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
  insert_decl_map (id, id->ret_label, id->ret_label);

  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the lhs to which the result of this call is assigned.  */
  modify_dest = tsi_stmt (id->tsi);
  if (TREE_CODE (modify_dest) == MODIFY_EXPR)
    {
      modify_dest = TREE_OPERAND (modify_dest, 0);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;
    }
  else
    modify_dest = NULL;

  /* Declare the return variable for the function.  */
  declare_return_variable (id, return_slot_addr,
			   modify_dest, &use_retvar);

  /* After we've initialized the parameters, we insert the body of the
     function itself.  */
  {
    struct cgraph_node *old_node = id->current_node;
    tree copy;

    id->current_node = edge->callee;
    copy = copy_body (id);

    /* If the function uses a return slot, then it may legitimately
       fall through while still returning a value, so we have to skip
       the warning here.  */
    if (warn_return_type
	&& !TREE_NO_WARNING (fn)
	&& !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
	&& return_slot_addr == NULL_TREE
	&& block_may_fallthru (copy))
      {
	warning ("control may reach end of non-void function %qD being inlined",
		 fn);
	TREE_NO_WARNING (fn) = 1;
      }

    append_to_statement_list (copy, &BIND_EXPR_BODY (expr));
    id->current_node = old_node;
  }
  inlined_body = &BIND_EXPR_BODY (expr);

  /* After the body of the function comes the RET_LABEL.  This must come
     before we evaluate the returned value below, because that evaluation
     may cause RTL to be generated.  */
  if (TREE_USED (id->ret_label))
    {
      tree label = build1 (LABEL_EXPR, void_type_node, id->ret_label);
      append_to_statement_list (label, &BIND_EXPR_BODY (expr));
    }

  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* Although, from the semantic viewpoint, the new expression has
     side-effects only if the old one did, it is not possible, from
     the technical viewpoint, to evaluate the body of a function
     multiple times without serious havoc.  */
  TREE_SIDE_EFFECTS (expr) = 1;

  tsi_link_before (&id->tsi, expr, TSI_SAME_STMT);

  /* If the inlined function returns a result that we care about,
     then we're going to need to splice in a MODIFY_EXPR.  Otherwise
     the call was a standalone statement and we can just replace it
     with the BIND_EXPR inline representation of the called function.  */
  if (!use_retvar || !modify_dest)
    *tsi_stmt_ptr (id->tsi) = build_empty_stmt ();
  else
    *tp = use_retvar;

  /* When we gimplify a function call, we may clear TREE_SIDE_EFFECTS on
     the call if it is to a "const" function.  Thus the copy of
     TREE_SIDE_EFFECTS from the CALL_EXPR to the BIND_EXPR above will
     result in TREE_SIDE_EFFECTS not being set for the inlined copy of a
     "const" function.

     Unfortunately, that is wrong as inlining the function can create/expose
     interesting side effects (such as setting of a return value).

     The easiest solution is to simply recalculate TREE_SIDE_EFFECTS for
     the toplevel expression.  */
  recalculate_side_effects (expr);

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (edge->callee);

  /* Recurse into the body of the just inlined function.  */
  expand_calls_inline (inlined_body, id);
  VARRAY_POP (id->fns);

  /* Don't walk into subtrees.  We've already handled them above.  */
  *walk_subtrees = 0;

  lang_hooks.tree_inlining.end_inlining (fn);

  /* Keep iterating.  */
 egress:
  input_location = saved_location;
  return NULL_TREE;
}
1703
/* Walk the statement tree rooted at *STMT_P and expand any inlinable
   CALL_EXPRs found in statement position.  ID carries the inlining
   context; ID->tsi is updated to point at each statement so that
   expand_call_inline can splice inlined bodies in place.  */

static void
expand_calls_inline (tree *stmt_p, inline_data *id)
{
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);
  int dummy;

  switch (code)
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	tree new;

	for (i = tsi_start (stmt); !tsi_end_p (i); )
	  {
	    id->tsi = i;
	    expand_calls_inline (tsi_stmt_ptr (i), id);

	    /* If inlining replaced the statement with a whole list,
	       splice the list's contents in and drop the placeholder;
	       otherwise just advance.  */
	    new = tsi_stmt (i);
	    if (TREE_CODE (new) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, new, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;

    case COND_EXPR:
      expand_calls_inline (&COND_EXPR_THEN (stmt), id);
      expand_calls_inline (&COND_EXPR_ELSE (stmt), id);
      break;

    case CATCH_EXPR:
      expand_calls_inline (&CATCH_BODY (stmt), id);
      break;

    case EH_FILTER_EXPR:
      expand_calls_inline (&EH_FILTER_FAILURE (stmt), id);
      break;

    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      expand_calls_inline (&TREE_OPERAND (stmt, 0), id);
      expand_calls_inline (&TREE_OPERAND (stmt, 1), id);
      break;

    case BIND_EXPR:
      expand_calls_inline (&BIND_EXPR_BODY (stmt), id);
      break;

    case COMPOUND_EXPR:
      /* We're gimple.  We should have gotten rid of all these.  */
      gcc_unreachable ();

    case RETURN_EXPR:
      /* Look through `return expr;' at the embedded MODIFY_EXPR.  */
      stmt_p = &TREE_OPERAND (stmt, 0);
      stmt = *stmt_p;
      if (!stmt || TREE_CODE (stmt) != MODIFY_EXPR)
	break;

      /* FALLTHRU */

    case MODIFY_EXPR:
      /* Look through the rhs (and any WITH_SIZE_EXPR wrapper) for a
	 call whose result is being assigned.  */
      stmt_p = &TREE_OPERAND (stmt, 1);
      stmt = *stmt_p;
      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
	{
	  stmt_p = &TREE_OPERAND (stmt, 0);
	  stmt = *stmt_p;
	}
      if (TREE_CODE (stmt) != CALL_EXPR)
	break;

      /* FALLTHRU */

    case CALL_EXPR:
      expand_call_inline (stmt_p, &dummy, id);
      break;

    default:
      break;
    }
}
1791
1792 /* Expand calls to inline functions in the body of FN. */
1793
void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  /* Don't allow recursion into FN.  */
  VARRAY_TREE_INIT (id.fns, 32, "fns");
  VARRAY_PUSH_TREE (id.fns, fn);
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      VARRAY_PUSH_TREE (id.fns, current_function_decl);
      prev_fn = current_function_decl;
    }

  /* Let the front end push any other in-progress functions.  */
  prev_fn = lang_hooks.tree_inlining.add_pending_fn_decls (&id.fns, prev_fn);

  /* Keep track of the low-water mark, i.e., the point where the first
     real inlining is represented in ID.FNS.  */
  id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);

  /* Replace all calls to inline functions with the bodies of those
     functions.  */
  id.tree_pruner = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
  expand_calls_inline (&DECL_SAVED_TREE (fn), &id);

  /* Clean up.  */
  htab_delete (id.tree_pruner);

#ifdef ENABLE_CHECKING
  {
    struct cgraph_edge *e;

    verify_cgraph_node (id.node);

    /* Double check that we inlined everything we are supposed to inline.  */
    for (e = id.node->callees; e; e = e->next_callee)
      gcc_assert (e->inline_failed);
  }
#endif
}
1847
1848 /* FN is a function that has a complete body, and CLONE is a function whose
1849 body is to be set to a copy of FN, mapping argument declarations according
1850 to the ARG_MAP splay_tree. */
1851
1852 void
1853 clone_body (tree clone, tree fn, void *arg_map)
1854 {
1855 inline_data id;
1856
1857 /* Clone the body, as if we were making an inline call. But, remap the
1858 parameters in the callee to the parameters of caller. If there's an
1859 in-charge parameter, map it to an appropriate constant. */
1860 memset (&id, 0, sizeof (id));
1861 VARRAY_TREE_INIT (id.fns, 2, "fns");
1862 VARRAY_PUSH_TREE (id.fns, clone);
1863 VARRAY_PUSH_TREE (id.fns, fn);
1864 id.decl_map = (splay_tree)arg_map;
1865
1866 /* Cloning is treated slightly differently from inlining. Set
1867 CLONING_P so that it's clear which operation we're performing. */
1868 id.cloning_p = true;
1869
1870 /* Actually copy the body. */
1871 append_to_statement_list_force (copy_body (&id), &DECL_SAVED_TREE (clone));
1872 }
1873
1874 /* Make and return duplicate of body in FN. Put copies of DECL_ARGUMENTS
1875 in *arg_copy and of the static chain, if any, in *sc_copy. */
1876
1877 tree
1878 save_body (tree fn, tree *arg_copy, tree *sc_copy)
1879 {
1880 inline_data id;
1881 tree body, *parg;
1882
1883 memset (&id, 0, sizeof (id));
1884 VARRAY_TREE_INIT (id.fns, 1, "fns");
1885 VARRAY_PUSH_TREE (id.fns, fn);
1886 id.node = cgraph_node (fn);
1887 id.saving_p = true;
1888 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
1889 *arg_copy = DECL_ARGUMENTS (fn);
1890
1891 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
1892 {
1893 tree new = copy_node (*parg);
1894
1895 lang_hooks.dup_lang_specific_decl (new);
1896 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
1897 insert_decl_map (&id, *parg, new);
1898 TREE_CHAIN (new) = TREE_CHAIN (*parg);
1899 *parg = new;
1900 }
1901
1902 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1903 if (*sc_copy)
1904 {
1905 tree new = copy_node (*sc_copy);
1906
1907 lang_hooks.dup_lang_specific_decl (new);
1908 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
1909 insert_decl_map (&id, *sc_copy, new);
1910 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
1911 *sc_copy = new;
1912 }
1913
1914 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
1915
1916 /* Actually copy the body. */
1917 body = copy_body (&id);
1918
1919 /* Clean up. */
1920 splay_tree_delete (id.decl_map);
1921 return body;
1922 }
1923
/* Helper for walk_type_fields and walk_tree below: walk NODE, and if
   the walk finds something (returns non-NULL), return it from the
   *enclosing* function immediately.  Relies on FUNC, DATA, PSET and
   RESULT being in scope at the expansion site.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree (&(NODE), func, data, pset);	\
      if (result)					\
	return result;					\
    }							\
  while (0)
1932
1933 /* This is a subroutine of walk_tree that walks field of TYPE that are to
1934 be walked whenever a type is seen in the tree. Rest of operands and return
1935 value are as for walk_tree. */
1936
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  struct pointer_set_t *pset)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* ... fall through ... */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that we'll
	 have infinite recursion.  Those types are uninteresting anyway.  */
      if (!POINTER_TYPE_P (TREE_TYPE (type))
	  && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case BOOLEAN_TYPE:
    case ENUMERAL_TYPE:
    case INTEGER_TYPE:
    case CHAR_TYPE:
    case REAL_TYPE:
      /* Range bounds may be expressions (e.g. VLA types).  */
      WALK_SUBTREE (TYPE_MIN_VALUE (type));
      WALK_SUBTREE (TYPE_MAX_VALUE (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2018
2019 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
2020 called with the DATA and the address of each sub-tree. If FUNC returns a
2021 non-NULL value, the traversal is aborted, and the value returned by FUNC
2022 is returned. If PSET is non-NULL it is used to record the nodes visited,
2023 and to avoid visiting a node more than once. */
2024
tree
walk_tree (tree *tp, walk_tree_fn func, void *data, struct pointer_set_t *pset)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

  /* Tail-call elimination by hand: instead of recursing on the last
     subtree of a node, reset TP and jump back to the top.  This keeps
     stack depth bounded when walking long chains (statement lists,
     TREE_LIST chains, etc.).  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pointer_set_insert (pset, *tp))
    return NULL_TREE;

  /* Call the function.  FUNC may clear WALK_SUBTREES to prune the
     walk below this node.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      if (code == TREE_LIST)
	/* But we still need to check our siblings.  */
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language front end a chance to handle its own nodes.  */
  result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
						   data, pset);
  if (result || ! walk_subtrees)
    return result;

  /* If this is a DECL_EXPR, walk into various fields of the type that it's
     defining.  We only want to walk into these fields of a type in this
     case.  Note that decls get walked as part of the processing of a
     BIND_EXPR.

     ??? Precisely which fields of types that we are supposed to walk in
     this case vs. the normal case aren't well defined.  */
  if (code == DECL_EXPR
      && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
      && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
    {
      tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));

      /* Call the function for the type.  See if it returns anything or
	 doesn't want us to continue.  If we are to continue, walk both
	 the normal fields and those for the declaration case.  */
      result = (*func) (type_p, &walk_subtrees, data);
      if (result || !walk_subtrees)
	return NULL_TREE;

      result = walk_type_fields (*type_p, func, data, pset);
      if (result)
	return result;

      WALK_SUBTREE (TYPE_SIZE (*type_p));
      WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));

      /* If this is a record type, also walk the fields.  */
      if (TREE_CODE (*type_p) == RECORD_TYPE
	  || TREE_CODE (*type_p) == UNION_TYPE
	  || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
	{
	  tree field;

	  for (field = TYPE_FIELDS (*type_p); field;
	       field = TREE_CHAIN (field))
	    {
	      /* We'd like to look at the type of the field, but we can easily
		 get infinite recursion.  So assume it's pointed to elsewhere
		 in the tree.  Also, ignore things that aren't fields.  */
	      if (TREE_CODE (field) != FIELD_DECL)
		continue;

	      WALK_SUBTREE (DECL_FIELD_OFFSET (field));
	      WALK_SUBTREE (DECL_SIZE (field));
	      WALK_SUBTREE (DECL_SIZE_UNIT (field));
	      if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		WALK_SUBTREE (DECL_QUALIFIER (field));
	    }
	}
    }

  else if (code != SAVE_EXPR
	   && code != BIND_EXPR
	   && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
    {
      int i, len;

      /* Walk over all the sub-trees of this operand.  */
      len = TREE_CODE_LENGTH (code);
      /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	 But, we only want to walk once.  */
      if (code == TARGET_EXPR
	  && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
	--len;

      /* Go through the subtrees.  We need to do this in forward order so
	 that the scope of a FOR_EXPR is handled properly.  */
#ifdef DEBUG_WALK_TREE
      for (i = 0; i < len; ++i)
	WALK_SUBTREE (TREE_OPERAND (*tp, i));
#else
      for (i = 0; i < len - 1; ++i)
	WALK_SUBTREE (TREE_OPERAND (*tp, i));

      if (len)
	{
	  /* The common case is that we may tail recurse here.  */
	  if (code != BIND_EXPR
	      && !TREE_CHAIN (*tp))
	    WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	  else
	    WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
	}
#endif
    }

  /* If this is a type, walk the needed fields in the type.  */
  else if (TYPE_P (*tp))
    {
      result = walk_type_fields (*tp, func, data, pset);
      if (result)
	return result;
    }
  else
    {
      /* Not one of the easy cases.  We must explicitly go through the
	 children.  */
      switch (code)
	{
	case ERROR_MARK:
	case IDENTIFIER_NODE:
	case INTEGER_CST:
	case REAL_CST:
	case VECTOR_CST:
	case STRING_CST:
	case BLOCK:
	case PLACEHOLDER_EXPR:
	case SSA_NAME:
	case FIELD_DECL:
	case RESULT_DECL:
	  /* None of these have subtrees other than those already walked
	     above.  */
	  break;

	case TREE_LIST:
	  WALK_SUBTREE (TREE_VALUE (*tp));
	  WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
	  break;

	case TREE_VEC:
	  {
	    int len = TREE_VEC_LENGTH (*tp);

	    if (len == 0)
	      break;

	    /* Walk all elements but the first.  */
	    while (--len)
	      WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	    /* Now walk the first one as a tail call.  */
	    WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
	  }
	  /* The WALK_SUBTREE_TAIL above transfers control; no fallthrough
	     occurs here (nor in the tail-called cases below).  */

	case COMPLEX_CST:
	  WALK_SUBTREE (TREE_REALPART (*tp));
	  WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

	case CONSTRUCTOR:
	  WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));

	case SAVE_EXPR:
	  WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

	case BIND_EXPR:
	  {
	    tree decl;
	    for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
	      {
		/* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
		   into declarations that are just mentioned, rather than
		   declared; they don't really belong to this part of the tree.
		   And, we can see cycles: the initializer for a declaration
		   can refer to the declaration itself.  */
		WALK_SUBTREE (DECL_INITIAL (decl));
		WALK_SUBTREE (DECL_SIZE (decl));
		WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	      }
	    WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
	  }

	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i;
	    for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	      WALK_SUBTREE (*tsi_stmt_ptr (i));
	  }
	  break;

	default:
	  /* ??? This could be a language-defined node.  We really should make
	     a hook for it, but right now just ignore it.  */
	  break;
	}
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE
#undef WALK_SUBTREE_TAIL
}
2259
2260 /* Like walk_tree, but does not walk duplicate nodes more than once. */
2261
2262 tree
2263 walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
2264 {
2265 tree result;
2266 struct pointer_set_t *pset;
2267
2268 pset = pointer_set_create ();
2269 result = walk_tree (tp, func, data, pset);
2270 pointer_set_destroy (pset);
2271 return result;
2272 }
2273
2274 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2275
2276 tree
2277 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2278 {
2279 enum tree_code code = TREE_CODE (*tp);
2280
2281 /* We make copies of most nodes. */
2282 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2283 || code == TREE_LIST
2284 || code == TREE_VEC
2285 || code == TYPE_DECL)
2286 {
2287 /* Because the chain gets clobbered when we make a copy, we save it
2288 here. */
2289 tree chain = TREE_CHAIN (*tp);
2290 tree new;
2291
2292 /* Copy the node. */
2293 new = copy_node (*tp);
2294
2295 /* Propagate mudflap marked-ness. */
2296 if (flag_mudflap && mf_marked_p (*tp))
2297 mf_mark (new);
2298
2299 *tp = new;
2300
2301 /* Now, restore the chain, if appropriate. That will cause
2302 walk_tree to walk into the chain as well. */
2303 if (code == PARM_DECL || code == TREE_LIST)
2304 TREE_CHAIN (*tp) = chain;
2305
2306 /* For now, we don't update BLOCKs when we make copies. So, we
2307 have to nullify all BIND_EXPRs. */
2308 if (TREE_CODE (*tp) == BIND_EXPR)
2309 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2310 }
2311
2312 else if (TREE_CODE_CLASS (code) == tcc_type)
2313 *walk_subtrees = 0;
2314 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2315 *walk_subtrees = 0;
2316 else if (TREE_CODE_CLASS (code) == tcc_constant)
2317 *walk_subtrees = 0;
2318 else
2319 gcc_assert (code != STATEMENT_LIST);
2320 return NULL_TREE;
2321 }
2322
2323 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2324 information indicating to what new SAVE_EXPR this one should be mapped,
2325 use that one. Otherwise, create a new node and enter it in ST. */
2326
2327 static void
2328 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2329 {
2330 splay_tree st = (splay_tree) st_;
2331 splay_tree_node n;
2332 tree t;
2333
2334 /* See if we already encountered this SAVE_EXPR. */
2335 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2336
2337 /* If we didn't already remap this SAVE_EXPR, do so now. */
2338 if (!n)
2339 {
2340 t = copy_node (*tp);
2341
2342 /* Remember this SAVE_EXPR. */
2343 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2344 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2345 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2346 }
2347 else
2348 {
2349 /* We've already walked into this SAVE_EXPR; don't do it again. */
2350 *walk_subtrees = 0;
2351 t = (tree) n->value;
2352 }
2353
2354 /* Replace this SAVE_EXPR with the copy. */
2355 *tp = t;
2356 }
2357
2358 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
2359 copies the declaration and enters it in the splay_tree in DATA (which is
2360 really an `inline_data *'). */
2361
2362 static tree
2363 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
2364 void *data)
2365 {
2366 inline_data *id = (inline_data *) data;
2367
2368 /* Don't walk into types. */
2369 if (TYPE_P (*tp))
2370 *walk_subtrees = 0;
2371
2372 else if (TREE_CODE (*tp) == LABEL_EXPR)
2373 {
2374 tree decl = TREE_OPERAND (*tp, 0);
2375
2376 /* Copy the decl and remember the copy. */
2377 insert_decl_map (id, decl,
2378 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2379 DECL_CONTEXT (decl)));
2380 }
2381
2382 return NULL_TREE;
2383 }
2384
2385 /* Perform any modifications to EXPR required when it is unsaved. Does
2386 not recurse into EXPR's subtrees. */
2387
2388 static void
2389 unsave_expr_1 (tree expr)
2390 {
2391 switch (TREE_CODE (expr))
2392 {
2393 case TARGET_EXPR:
2394 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2395 It's OK for this to happen if it was part of a subtree that
2396 isn't immediately expanded, such as operand 2 of another
2397 TARGET_EXPR. */
2398 if (TREE_OPERAND (expr, 1))
2399 break;
2400
2401 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2402 TREE_OPERAND (expr, 3) = NULL_TREE;
2403 break;
2404
2405 default:
2406 break;
2407 }
2408 }
2409
/* Called via walk_tree when an expression is unsaved.  DATA is really
   an `inline_data *'; its decl_map splay tree maps local declarations
   and SAVE_EXPRs to their replacements (recorded by
   mark_local_for_remap_r and remap_save_expr).  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  Otherwise the decl is left alone:
	 it is merely mentioned here, not declared here.  */
      if (n)
	*tp = (tree) n->value;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      /* Everything else gets a plain copy ...  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* ... plus whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
2450
2451 /* Copies everything in EXPR and replaces variables, labels
2452 and SAVE_EXPRs local to EXPR. */
2453
2454 tree
2455 unsave_expr_now (tree expr)
2456 {
2457 inline_data id;
2458
2459 /* There's nothing to do for NULL_TREE. */
2460 if (expr == 0)
2461 return expr;
2462
2463 /* Set up ID. */
2464 memset (&id, 0, sizeof (id));
2465 VARRAY_TREE_INIT (id.fns, 1, "fns");
2466 VARRAY_PUSH_TREE (id.fns, current_function_decl);
2467 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2468
2469 /* Walk the tree once to find local labels. */
2470 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2471
2472 /* Walk the tree again, copying, remapping, and unsaving. */
2473 walk_tree (&expr, unsave_r, &id, NULL);
2474
2475 /* Clean up. */
2476 splay_tree_delete (id.decl_map);
2477
2478 return expr;
2479 }
2480
2481 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2482
2483 static tree
2484 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2485 {
2486 if (*tp == data)
2487 return (tree) data;
2488 else
2489 return NULL;
2490 }
2491
2492 bool
2493 debug_find_tree (tree top, tree search)
2494 {
2495 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2496 }
2497
2498 /* Declare the variables created by the inliner. Add all the variables in
2499 VARS to BIND_EXPR. */
2500
2501 static void
2502 declare_inline_vars (tree bind_expr, tree vars)
2503 {
2504 tree t;
2505 for (t = vars; t; t = TREE_CHAIN (t))
2506 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2507
2508 add_var_to_bind_expr (bind_expr, vars);
2509 }