/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc), such that
   when we reach a "return x" statement we return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively, so the
   semantics of the function are preserved.  If we can guarantee that the
   value of an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and a jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
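
/* As a concrete illustration of case 3 above (a source-level sketch only;
   the pass itself works on GIMPLE, and the names fact, n and m_acc are
   invented for this example), a function such as

   int fact (int n)
   {
     if (n < 2)
       return 1;
     return n * fact (n - 1);
   }

   is conceptually turned into

   int fact (int n)
   {
     int m_acc = 1;

     while (n >= 2)
       {
         m_acc *= n;
         n = n - 1;
       }

     return m_acc * 1;
   }

   Each eliminated recursive step has a = 0 and m = n, so only the
   multiplicative accumulator is updated, and the final return 1 is
   rewritten as return m_acc * 1 + a_acc per case 1 (with a_acc omitted
   here because its value never changes).  */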

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factors for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
        return false;

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        *m = build_real (TREE_TYPE (op0), dconstm1);
      else
        *m = build_int_cst (TREE_TYPE (op0), -1);

      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
        *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
        {
          if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
            *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
          else
            *m = build_int_cst (TREE_TYPE (non_ass_var), -1);

          *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
        }

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
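
/* As a hypothetical GIMPLE-level example (the SSA names x_1, b_5, c_6, y_2
   and z_3 are invented here), suppose the recursive call result is x_1 and
   the statements between the call and the return are

     y_2 = x_1 + b_5;
     z_3 = c_6 * y_2;

   Walking them with process_assignment, the first statement yields *a = b_5
   (PLUS_EXPR, *ass_var becomes y_2) and the second yields *m = c_6
   (MULT_EXPR, *ass_var becomes z_3); find_tail_calls then combines these
   into the overall decomposition return c_6 * x_1 + c_6 * b_5.  */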

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }
  return var;
}

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     for example, in "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Helper to insert PHI_ARG into the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement at the position specified by GSI.  Returns the tree node of
   the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}

/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}

/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tail call.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}

/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the loop,
   which requires adding phi nodes.  */

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what
     to put into phi nodes (there in fact does not have to be such
     ssa name available).  A solution would be to have an artificial
     use/kill for all virtual operands in EXIT node.  Unless we have
     this, we cannot do much better than to rebuild the ssa form for
     possibly affected virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
        mark_sym_for_renaming (var);
    }
}

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used to create the temporary
   variable for the accumulator.  The accumulator will be inserted into the
   phis of basic block BB (which must have a single predecessor), with an
   initial value INIT converted to the current function's return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Split the edge from the entry block if FIRST has more than one
             predecessor or already contains (possibly degenerate) PHI nodes,
             so that the argument-copying PHI nodes can be added to a block
             with a single predecessor.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = DECL_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gimple phi;

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
         statements adding the accumulated value are inserted at all exits.
         This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();
  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
 }
};