/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "function.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "diagnostic.h"
34 #include "except.h"
35 #include "tree-pass.h"
36 #include "flags.h"
37 #include "langhooks.h"
38 #include "dbgcnt.h"
39
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively, so the semantics
   of the function are obviously preserved.  If we can guarantee that the
   value of an accumulator never changes, we omit it.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x does not have any of the special shapes
      handled below.  We rewrite this to a gimple equivalent of
      return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f(...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f(...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f(...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
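
/* As a concrete illustration of case 3 (a sketch for exposition, not
   literal output of this pass): in

   int fact (int n)
   {
     if (n <= 1)
       return 1;
     return n * fact (n - 1);
   }

   the recursive return has the shape a + m * fact (...) with a = 0 and
   m = n.  After the transformation the function behaves like

   int fact (int n)
   {
     int m_acc = 1;

     while (n > 1)
       m_acc *= n--;

     return m_acc * 1;
   }

   where the final return is case 1, return m_acc * x + a_acc, with
   a_acc omitted because it provably stays 0.  */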

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};
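
/* For illustration (hypothetical values, not produced verbatim by the
   pass): a statement of the form

     return 2 + 3 * f (x);

   would be described by a tailcall with add == 2 and mult == 3, while a
   plain return f (x); leaves both fields NULL_TREE.  */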

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it calls alloca or setjmp, or some of its
   parameters have their address taken).  This test must pass in addition to
   suitable_for_tail_opt_p in order to make tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
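
/* An illustrative sketch of the TREE_ADDRESSABLE restriction above
   (hypothetical code, not from this file): in

     int f (int n)
     {
       g (&n);
       return n > 0 ? f (n - 1) : 0;
     }

   the address of the parameter n escapes into g, so n is
   TREE_ADDRESSABLE and f is rejected for sibcall discovery.  */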

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
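
/* For example (an illustrative sketch, not a real dump), with the call
   and the uses in one block:

     x_1 = n_2 + 1;
     y_3 = f ();        <-- GSI points to the call
     z_4 = x_1 + y_3;   <-- AT

   x_1 is defined before the call, so it is independent of AT and is
   returned unchanged, whereas y_3 is defined at GSI itself, so NULL_TREE
   is returned for it.  */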

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL, whose result is held in *ASS_VAR.  *M and *A are the
   multiplicative and the additive factors for the real return value.
   Returns false if STMT cannot be handled.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1, non_ass_var;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
        return false;

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  if (rhs_class != GIMPLE_BINARY_RHS)
    return false;

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  /* We handle only code like

     x = call ();
     y = m * x;
     z = y + a;
     return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

  if (op0 == *ass_var
      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR,
         POINTER_PLUS_EXPR).  */

    default:
      return false;
    }
}

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }
  return var;
}
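
/* For instance (hypothetical GIMPLE), if the destination of E starts with

     x_3 = PHI <x_1(E), x_2(other edge)>

   then propagating x_1 through E yields x_3; a name that is not a PHI
   argument on E is returned unchanged.  */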

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels and debug statements.  */
      if (gimple_code (stmt) == GIMPLE_LABEL || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = TREE_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could
                 be relaxed if we emitted a suitable type conversion
                 statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && ref_maybe_used_by_stmt_p (call, var))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after DCE.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (!process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          if (a)
            a = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp_a), a, tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          if (m)
            m = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), m, tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), a, tmp_m);
        }
    }

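  /* For example (a hypothetical statement sequence, for illustration),
     given

       x = f ();
       y = x * 3;
       z = y + 2;
       w = z * 5;
       return w;

     the loop above accumulates m = 3 * 5 = 15 and a = 2 * 5 = 10,
     since w = 5 * (3 * x + 2) = 15 * x + 10.  */
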
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return value.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which computes the operation specified by
   CODE applied to ACC and OP1, assigns the result to a new variable with
   name LABEL, and inserts the statement before the position specified by
   GSI.  Returns the tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   after the position specified by GSI.  Returns the new SSA name of the
   updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
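
/* In terms of the algebra in the comment at the top of the file, the
   updates above amount to (sketched as plain C for clarity)

     a_acc = a_acc + a * m_acc;
     m_acc = m_acc * m;

   for a tail recursion of the shape return a + m * f (...).  */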

/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
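
/* The net effect is to turn return x into a gimple equivalent of
   (a sketch with hypothetical SSA names)

     mul_tmp_1 = m_acc * x;
     acc_tmp_2 = a_acc + mul_tmp_1;
     return acc_tmp_2;

   i.e. case 1 from the comment at the top of the file,
   return m_acc * x + a_acc.  */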

/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}

/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}

/* Eliminates the tail call described by T, replacing it with a jump to the
   start of the function and copies of the argument values.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = TREE_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}

/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the
   loop, which requires adding phi nodes.  */

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what
     to put into the phi nodes (in fact, a suitable ssa name does not
     even have to be available).  A solution would be to have an artificial
     use/kill for all virtual operands in the EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
        mark_sym_for_renaming (var);
    }
}

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the call as a tail call for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator is inserted as a phi node
   in basic block BB, which has a single predecessor, with the initial value
   INIT converted to the return type of the current function.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that the block has a single predecessor and contains
             no phi nodes; otherwise split the edge from the entry
             block.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = TREE_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gimple phi;

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};