/* Tail call optimization on trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach a "return x" statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are obviously preserved.  If we are
   guaranteed that the value of an accumulator never changes, we omit that
   accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  The special cases when the value is just
      added or just multiplied are obtained by setting m = 1 or a = 0,
      respectively.

   TODO -- it is possible to do similar tricks for other operations.  */

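/* As an additional illustration (not part of the original sources; only a
   sketch of case 3 above with a == 0 and m == n), the function

   int fact (int n)
   {
     if (n > 1)
       return n * fact (n - 1);
     else
       return 1;
   }

   would under this scheme effectively become

   int fact (int n)
   {
     int m_acc = 1;

     while (n > 1)
       m_acc *= n--;

     return m_acc;
   }

   since a is always 0 here, a_acc never changes and is omitted, and the final
   "return 1" is rewritten to return a_acc + 1 * m_acc, i.e. m_acc.  */
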
/* A structure that describes a tail call.  */

struct tailcall
{
  /* The block in which the call occurs.  */
  basic_block call_block;

  /* The iterator pointing to the call statement.  */
  block_stmt_iterator call_bsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  int i;

  if (current_function_stdarg)
    return false;

  /* No local variable should be call-clobbered.  We ignore any kind
     of memory tag, as these are not real variables.  */
  for (i = 0; i < (int) VARRAY_ACTIVE_SIZE (referenced_vars); i++)
    {
      tree var = VARRAY_TREE (referenced_vars, i);

      if (!(TREE_STATIC (var) || DECL_EXTERNAL (var))
          && var_ann (var)->mem_tag_kind == NOT_A_TAG
          && is_call_clobbered (var))
        return false;
    }

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (current_function_calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (current_function_calls_setjmp)
    return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by BSI (in the sense that we already know EXPR's value
   at BSI).  We use the fact that we are only called from a chain of
   basic blocks that each have only a single successor.  Returns the
   expression containing the value of EXPR at BSI.  */

static tree
independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = bb_for_stmt (at);
  call_bb = bb_for_stmt (bsi_stmt (bsi));
  for (bb = call_bb; bb != at_bb; bb = bb->succ->dest)
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = bb_for_stmt (at);

      /* EXPR has the default definition, or is defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !bsi_end_p (bsi); bsi_next (&bsi))
            if (bsi_stmt (bsi) == at)
              break;

          if (!bsi_end_p (bsi))
            expr = NULL_TREE;
          break;
        }

      if (TREE_CODE (at) != PHI_NODE)
        {
          expr = NULL_TREE;
          break;
        }

      for (e = bb->pred; e; e = e->pred_next)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = bb->succ->dest)
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

/* Simulates the effect of an assignment of ASS in STMT on the return value
   of the tail recursive CALL passed in ASS_VAR.  M and A are the
   multiplicative and the additive factors for the real return value.  */

static bool
process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = TREE_OPERAND (ass, 0);
  tree src = TREE_OPERAND (ass, 1);
  enum tree_code code = TREE_CODE (src);
  tree src_var = src;

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  STRIP_NOPS (src_var);
  if (TREE_CODE (src_var) == SSA_NAME)
    {
      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  if (TREE_CODE_CLASS (code) != '2')
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_unsafe_math_optimizations)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We only handle code like

     x = call ();
     y = m * x;
     z = y + a;
     return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      /* There should be no previous addition.  TODO -- it should be fairly
         straightforward to lift this restriction -- just allow storing
         more complicated expressions in *A, and gimplify it in
         adjust_accumulator_values.  */
      if (*a)
        return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      /* A similar remark applies here.  Handling multiplication after
         addition is just slightly more complicated -- we need to multiply
         both *A and *M.  */
      if (*a || *m)
        return false;
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR).  */

    default:
      return false;
    }
}

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  tree phi;

  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
      return PHI_RESULT (phi);

  return var;
}

/* Finds tail calls falling into basic block BB.  The tail calls found are
   added to the start of the list *RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var, ret_var, stmt, func, param, args, call = NULL_TREE;
  block_stmt_iterator bsi, absi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  stmt_ann_t ann;

  if (bb->succ->succ_next)
    return;

  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
    {
      stmt = bsi_stmt (bsi);

      /* Ignore labels.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      get_stmt_operands (stmt);

      /* Check for a call.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
        {
          ass_var = TREE_OPERAND (stmt, 0);
          call = TREE_OPERAND (stmt, 1);
          if (TREE_CODE (call) == WITH_SIZE_EXPR)
            call = TREE_OPERAND (call, 0);
        }
      else
        {
          ass_var = NULL_TREE;
          call = stmt;
        }

      if (TREE_CODE (call) == CALL_EXPR)
        break;

      /* If the statement has virtual or volatile operands, fail.  */
      ann = stmt_ann (stmt);
      if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann))
          || NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann))
          || NUM_VUSES (VUSE_OPS (ann))
          || ann->has_volatile_ops)
        return;
    }

  if (bsi_end_p (bsi))
    {
      /* Recurse to the predecessors.  */
      for (e = bb->pred; e; e = e->pred_next)
        find_tail_calls (e->src, ret);

      return;
    }

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = get_callee_fndecl (call);
  if (func == current_function_decl)
    {
      for (param = DECL_ARGUMENTS (func), args = TREE_OPERAND (call, 1);
           param && args;
           param = TREE_CHAIN (param), args = TREE_CHAIN (args))
        {
          tree arg = TREE_VALUE (args);
          if (param != arg
              /* Make sure there are no problems with copying.  Note we must
                 have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion
                 statement.  */
              && (!is_gimple_reg_type (TREE_TYPE (param))
                  || !lang_hooks.types_compatible_p (TREE_TYPE (param),
                                                     TREE_TYPE (arg))))
            break;
        }
      if (!args && !param)
        tail_recursion = true;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  absi = bsi;
  while (1)
    {
      bsi_next (&absi);

      while (bsi_end_p (absi))
        {
          ass_var = propagate_through_phis (ass_var, abb->succ);
          abb = abb->succ->dest;
          absi = bsi_start (abb);
        }

      stmt = bsi_stmt (absi);

      if (TREE_CODE (stmt) == LABEL_EXPR)
        continue;

      if (TREE_CODE (stmt) == RETURN_EXPR)
        break;

      if (TREE_CODE (stmt) != MODIFY_EXPR)
        return;

      if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
        return;
    }

  /* See if this is a tail call we can handle.  */
  ret_var = TREE_OPERAND (stmt, 0);
  if (ret_var
      && TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      tree ret_op = TREE_OPERAND (ret_var, 1);
      STRIP_NOPS (ret_op);
      if (!tail_recursion
          && TREE_CODE (ret_op) != SSA_NAME)
        return;

      if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
        return;
      ret_var = TREE_OPERAND (ret_var, 0);
    }

  /* We may proceed if there is either no return value, or the return value
     is identical to the call's return value.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = xmalloc (sizeof (struct tailcall));

  nw->call_block = bb;
  nw->call_bsi = bsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Adjust the accumulator values according to A and M after BSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
{
  tree stmt, var, phi, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree a_acc_arg = a_acc, m_acc_arg = m_acc;

  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            {
              stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                            build (MULT_EXPR, ret_type, m_acc, a));

              tmp = create_tmp_var (ret_type, "acc_tmp");
              add_referenced_tmp_var (tmp);

              var = make_ssa_name (tmp, stmt);
              TREE_OPERAND (stmt, 0) = var;
              bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
            }
        }
      else
        var = a;

      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (PLUS_EXPR, ret_type, a_acc, var));
      var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      a_acc_arg = var;
    }

  if (m)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (MULT_EXPR, ret_type, m_acc, m));
      var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      m_acc_arg = var;
    }

  if (a_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
        if (PHI_RESULT (phi) == a_acc)
          break;

      add_phi_arg (&phi, a_acc_arg, back);
    }

  if (m_acc)
    {
      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
        if (PHI_RESULT (phi) == m_acc)
          break;

      add_phi_arg (&phi, m_acc_arg, back);
    }
}

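/* For illustration (not part of the original sources): with case 3 from the
   header comment, a return of the hypothetical form "return 3 + 2 * f (...)"
   reaches adjust_accumulator_values above with A == 3 and M == 2, and the
   accumulators are updated as

     a_acc = a_acc + 3 * m_acc;
     m_acc = m_acc * 2;

   so that the value eventually returned, a_acc + x * m_acc, stays equal to
   what the eliminated call would have returned.  The constants 3 and 2 are
   purely example values.  */
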
/* Adjust the value of the return statement at the end of BB according to the
   M and A accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  block_stmt_iterator bsi = bsi_last (bb);

  gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);

  ret_var = TREE_OPERAND (ret_stmt, 0);
  if (!ret_var)
    return;

  if (TREE_CODE (ret_var) == MODIFY_EXPR)
    {
      ret_var->common.ann = (tree_ann_t) stmt_ann (ret_stmt);
      bsi_replace (&bsi, ret_var, true);
      SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var, 0)) = ret_var;
      ret_var = TREE_OPERAND (ret_var, 0);
      ret_stmt = build1 (RETURN_EXPR, TREE_TYPE (ret_stmt), ret_var);
      bsi_insert_after (&bsi, ret_stmt, BSI_NEW_STMT);
    }

  if (m)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (MULT_EXPR, ret_type, m_acc, ret_var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_tmp_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
    }
  else
    var = ret_var;

  if (a)
    {
      stmt = build (MODIFY_EXPR, ret_type, NULL_TREE,
                    build (PLUS_EXPR, ret_type, a_acc, var));

      tmp = create_tmp_var (ret_type, "acc_tmp");
      add_referenced_tmp_var (tmp);

      var = make_ssa_name (tmp, stmt);
      TREE_OPERAND (stmt, 0) = var;
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
    }

  TREE_OPERAND (ret_stmt, 0) = var;
  modify_stmt (ret_stmt);
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, stmt, args, rslt, call;
  basic_block bb, first;
  edge e;
  tree phi;
  stmt_ann_t ann;
  v_may_def_optype v_may_defs;
  unsigned i;
  block_stmt_iterator bsi;

  stmt = bsi_stmt (t->call_bsi);
  get_stmt_operands (stmt);
  ann = stmt_ann (stmt);
  bb = t->call_block;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    stmt = TREE_OPERAND (stmt, 1);

  first = ENTRY_BLOCK_PTR->succ->dest;

  /* Remove the code after call_bsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  bsi = t->call_bsi;
  bsi_next (&bsi);
  while (!bsi_end_p (bsi))
    {
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (TREE_CODE (bsi_stmt (bsi)) == RETURN_EXPR)
        break;

      bsi_remove (&bsi);
    }

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (t->call_block->succ, first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL_TREE;

  /* Add phi node entries for arguments.  Not every PHI node corresponds to
     a function argument (there may be PHI nodes for virtual definitions of the
     eliminated calls), so we search for a PHI corresponding to each argument
     rather than searching for which argument a PHI node corresponds to.  */

  for (param = DECL_ARGUMENTS (current_function_decl),
         args = TREE_OPERAND (stmt, 1);
       param;
       param = TREE_CHAIN (param),
         args = TREE_CHAIN (args))
    {
      for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
        if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
          break;

      /* The phi node does not have to be there, in case the operand is
         invariant in the function.  */
      if (!phi)
        continue;

      add_phi_arg (&phi, TREE_VALUE (args), e);
    }

  /* Add phi nodes for the call clobbered variables.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    {
      param = SSA_NAME_VAR (V_MAY_DEF_RESULT (v_may_defs, i));
      for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
        if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
          break;

      if (!phi)
        {
          tree name = var_ann (param)->default_def;
          tree new_name;

          if (!name)
            {
              /* It may happen that the tag does not have a default_def in the
                 case when all uses of it are dominated by a MUST_DEF.  This
                 however means that it is not necessary to add a phi node for
                 this tag.  */
              continue;
            }
          new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));

          var_ann (param)->default_def = new_name;
          phi = create_phi_node (name, first);
          SSA_NAME_DEF_STMT (name) = phi;
          add_phi_arg (&phi, new_name, ENTRY_BLOCK_PTR->succ);

          /* For all calls the same set of variables should be clobbered.
             This means that there should always be the appropriate phi node,
             except for the first time we eliminate the call.  */
          gcc_assert (!first->pred->pred_next->pred_next);
        }

      add_phi_arg (&phi, V_MAY_DEF_OP (v_may_defs, i), e);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);

  call = bsi_stmt (t->call_bsi);
  if (TREE_CODE (call) == MODIFY_EXPR)
    {
      rslt = TREE_OPERAND (call, 0);

      /* The result of the call will no longer be defined, so adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
    }

  bsi_remove (&t->call_bsi);
}

/* Optimizes the tail call described by T.  If OPT_TAILCALLS is true, also
   mark the tail call for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      tree stmt = bsi_stmt (t->call_bsi);

      stmt = get_call_expr_in (stmt);
      CALL_EXPR_TAILCALL (stmt) = 1;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_generic_expr (dump_file, stmt, dump_flags);
          fprintf (dump_file, " in bb %i\n", t->call_block->index);
        }
    }

  return false;
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static void
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = ENTRY_BLOCK_PTR->succ->dest;
  tree stmt, param, ret_type, tmp, phi;

  if (!suitable_for_tail_opt_p ())
    return;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && TREE_CODE (stmt) == RETURN_EXPR)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that there is only one predecessor of the block.  */
          if (first->pred->pred_next)
            first = split_edge (ENTRY_BLOCK_PTR->succ);

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = TREE_CHAIN (param))
            if (var_ann (param)
                /* Also parameters that are only defined but never used need
                   not be copied.  */
                && (var_ann (param)->default_def
                    && TREE_CODE (var_ann (param)->default_def) == SSA_NAME))
              {
                tree name = var_ann (param)->default_def;
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                tree phi;

                var_ann (param)->default_def = new_name;
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (&phi, new_name, first->pred);
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        {
          ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

          tmp = create_tmp_var (ret_type, "add_acc");
          add_referenced_tmp_var (tmp);

          phi = create_phi_node (tmp, first);
          add_phi_arg (&phi, build_int_cst (ret_type, 0), first->pred);
          a_acc = PHI_RESULT (phi);
        }

      if (act->mult && !m_acc)
        {
          ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));

          tmp = create_tmp_var (ret_type, "mult_acc");
          add_referenced_tmp_var (tmp);

          phi = create_phi_node (tmp, first);
          add_phi_arg (&phi, build_int_cst (ret_type, 1), first->pred);
          m_acc = PHI_RESULT (phi);
        }
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && TREE_CODE (stmt) == RETURN_EXPR)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    {
      free_dominance_info (CDI_DOMINATORS);
      cleanup_tree_cfg ();
    }
}

static void
execute_tail_recursion (void)
{
  tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0;
}

static void
execute_tail_calls (void)
{
  tree_optimize_tail_calls_1 (true);
}

struct tree_opt_pass pass_tail_recursion =
{
  "tailr",                              /* name */
  NULL,                                 /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */
  0                                     /* letter */
};

struct tree_opt_pass pass_tail_calls =
{
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,     /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,     /* todo_flags_finish */
  0                                     /* letter */
};