/* Tail call optimization on trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"
#include "common/common-target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-utils.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) so that when
   we reach a return x statement, we return a_acc + x * m_acc instead.
   They are initialized to 0 and 1, respectively, so the semantics of the
   function are obviously preserved.  If we can guarantee that the value of
   an accumulator never changes, we omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
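
/* As a concrete illustration of case 3 above (this example is schematic,
   not taken from a testcase): for

   int f (int n)
   {
     if (n > 0)
       return 1 + 2 * f (n - 1);
     return 0;
   }

   we have a = 1 and m = 2, so each eliminated recursive call performs
   a_acc += 1 * m_acc; m_acc *= 2; and the final return 0 is rewritten
   into return a_acc + 0 * m_acc, i.e. return a_acc.  */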

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of the multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
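
/* For example, a variadic function such as int sum_all (int count, ...)
   has cfun->stdarg set and is therefore rejected above.  */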

/* Returns false when the function is not suitable for sibcall optimization
   for some reason (e.g. if it uses alloca or setjmp).  This test must pass
   in addition to suitable_for_tail_opt_p in order to make tail call
   discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the address of an argument is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
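
/* For instance, in

     int f (int n)
     {
       int *p = &n;
       escape (p);
       return f (n - 1);
     }

   the parameter n is TREE_ADDRESSABLE, so the loop above rejects the
   function (escape stands for any address-escaping use; the callee could
   otherwise still observe the caller's stack slot for n).  */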

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's
   value at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
	break;

      if (bb == call_bb)
	{
	  for (; !gsi_end_p (gsi); gsi_next (&gsi))
	    if (gsi_stmt (gsi) == at)
	      break;

	  if (!gsi_end_p (gsi))
	    expr = NULL_TREE;
	  break;
	}

      if (gimple_code (at) != GIMPLE_PHI)
	{
	  expr = NULL_TREE;
	  break;
	}

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->src->aux)
	  break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
	{
	  /* The value is a constant.  */
	  break;
	}
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
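
/* As a schematic example of the above: when processing

     x = f (n - 1);
     y = x + n;
     return y;

   the operand n of the addition has its definition outside the chain of
   blocks between the call and the return (here it is a default definition),
   so its value is already known at the call site and n itself is returned.
   An SSA name defined between the call and the return yields NULL_TREE
   instead.  */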

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gassign *stmt, gimple_stmt_iterator call, tree *m,
		    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
	 additional code.  */
      if (gimple_assign_cast_p (stmt))
	{
	  if (TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
	    return false;

	  /* Even if the type modes are the same, if the precision of the
	     type is smaller than mode's precision,
	     reduce_to_bit_field_precision would generate additional code.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dest))
	      && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (dest)))
		  > TYPE_PRECISION (TREE_TYPE (dest))))
	    return false;
	}

      if (src_var != *ass_var)
	return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
	   && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case POINTER_PLUS_EXPR:
      if (op0 != *ass_var)
	return false;
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      *m = build_minus_one_cst (TREE_TYPE (op0));
      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
	*a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
	{
	  *m = build_minus_one_cst (TREE_TYPE (non_ass_var));
	  *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
	}

      *ass_var = dest;
      return true;

    default:
      return false;
    }
}
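
/* Schematically, for a tail-recursive call whose result is ASS_VAR, the
   sequence

     x = f (n - 1);
     y = x * 3;
     z = y + n;
     return z;

   is processed one assignment at a time: the multiplication sets *m to 3,
   the addition sets *a to n (n being independent of the call), and *ass_var
   follows the chain x -> y -> z, recovering the return a + m * f (...)
   shape described in the file header.  */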

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
	return PHI_RESULT (phi);
    }
  return var;
}

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls
   is added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt;
  gcall *call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_RETURN
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	{
	  call = as_a <gcall *> (stmt);
	  ass_var = gimple_call_lhs (call);
	  break;
	}

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
	  || gimple_has_volatile_ops (stmt))
	return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens,
     in (e.g.) "*p = foo()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call() will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func
      && !DECL_BUILT_IN (func)
      && recursive_call_p (current_function_decl, func))
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
	   param && idx < gimple_call_num_args (call);
	   param = DECL_CHAIN (param), idx++)
	{
	  arg = gimple_call_arg (call, idx);
	  if (param != arg)
	    {
	      /* Make sure there are no problems with copying.  The parameter
		 must have a copyable type and the two arguments must have
		 reasonably equivalent types.  The latter requirement could be
		 relaxed if we emitted a suitable type conversion statement.  */
	      if (!is_gimple_reg_type (TREE_TYPE (param))
		  || !useless_type_conversion_p (TREE_TYPE (param),
						 TREE_TYPE (arg)))
		break;

	      /* The parameter should be a real operand, so that the phi node
		 created for it at the start of the function has the meaning
		 of copying the value.  This test implies is_gimple_reg_type
		 from the previous condition, however this one could be
		 relaxed by being more careful with copying the new value
		 of the parameter (emitting appropriate GIMPLE_ASSIGN and
		 updating the virtual operands).  */
	      if (!is_gimple_reg (param))
		break;
	    }
	}
      if (idx == gimple_call_num_args (call) && !param)
	tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
	  && auto_var_in_fn_p (var, cfun->decl)
	  && (ref_maybe_used_by_stmt_p (call, var)
	      || call_may_clobber_ref_p (call, var)))
	return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
	{
	  ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
	  abb = single_succ (abb);
	  agsi = gsi_start_bb (abb);
	}

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	break;

      if (gimple_clobber_p (stmt))
	continue;

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
	return;

      /* This is a gimple assign.  */
      if (! process_assignment (as_a <gassign *> (stmt), gsi, &tmp_m,
				&tmp_a, &ass_var))
	return;

      if (tmp_a)
	{
	  tree type = TREE_TYPE (tmp_a);
	  if (a)
	    a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
	  else
	    a = tmp_a;
	}
      if (tmp_m)
	{
	  tree type = TREE_TYPE (tmp_m);
	  if (m)
	    m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
	  else
	    m = tmp_m;

	  if (a)
	    a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
	}
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (as_a <greturn *> (stmt));

  /* We may proceed if there is either no return value, or the return value
     is identical to the call's return value.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers only allow additions.  */
  if (m && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
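
/* Informally, the discovery above accepts calls such as

     return f (n);		(a plain tail call)
     return n + f (n - 1);	(tail recursion with an addend, accepted only
				 when f is the current function)

   and rejects, among other shapes, a call like f (&local) that may read or
   clobber a local variable, since the callee could still access the
   caller's frame.  */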

/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi.phi ()) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement in the position specified by GSI.  Returns the
   tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
			      tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gassign *stmt;

  if (POINTER_TYPE_P (ret_type))
    {
      gcc_assert (code == PLUS_EXPR && TREE_TYPE (acc) == sizetype);
      code = POINTER_PLUS_EXPR;
    }
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1))
      && code != POINTER_PLUS_EXPR)
    stmt = gimple_build_assign (result, code, acc, op1);
  else
    {
      tree tem;
      if (code == POINTER_PLUS_EXPR)
	tem = fold_build2 (code, TREE_TYPE (op1), op1, acc);
      else
	tem = fold_build2 (code, TREE_TYPE (op1),
			   fold_convert (TREE_TYPE (op1), acc), op1);
      tree rhs = fold_convert (ret_type, tem);
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC
   by the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
			     gimple_stmt_iterator gsi)
{
  gassign *stmt;
  tree var = copy_ssa_name (acc);
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign (var, code, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
			       fold_build2 (code,
					    TREE_TYPE (op1),
					    fold_convert (TREE_TYPE (op1), acc),
					    op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
				      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
	{
	  if (integer_onep (a))
	    var = m_acc;
	  else
	    var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
						a, gsi);
	}
      else
	var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

/* Adjust the value of the return at the end of BB according to the M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
					   gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
					   gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
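
/* After this adjustment, an exit that originally read

     return x;

   computes the gimple equivalent of

     mul_tmp = m_acc * x;
     acc_tmp = a_acc + mul_tmp;
     return acc_tmp;

   which is exactly case 1 from the file header (the SSA names shown are
   schematic; "mul_tmp" and "acc_tmp" are the labels used above).  */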

/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}

/* Returns true if argument PARAM of the tail recursive call needs to be
   copied when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
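
/* For instance, given

     int f (int n, int unused)
     {
       if (n > 0)
	 return f (n - 1, 0);
       return 0;
     }

   n is read, so it has an SSA default definition and needs a copy (a phi
   node at the start of the function); if unused is never read, it has no
   default definition and no copy is made for it.  */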

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gphi *phi;
  gphi_iterator gpi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
	       bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
	 sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
	break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
		    EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
				first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
	 idx = 0, gpi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
	continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gpi.phi ();
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gpi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
	 SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
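
/* After the elimination, a block that ended in

     x = f (n - 1);
     return x;

   has the call removed and its outgoing edge redirected to the first basic
   block of the function, with the phi node for n receiving n - 1 on the new
   back edge; this is how the sum example in the file header turns into a
   loop.  */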

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));

      gimple_call_set_tail (stmt, true);
      cfun->tail_call_marked = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Found tail call ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
	}
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function (or sizetype, if the function returns a pointer).  LABEL
   is the name used to create the temporary variable for the accumulator.
   The accumulator will be inserted in the phis of a basic block BB with a
   single predecessor, with an initial value INIT converted to the current
   function's return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (POINTER_TYPE_P (ret_type))
    ret_type = sizetype;

  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gphi *phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
	       UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
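
/* Schematically, the accumulator created above eventually becomes a phi of
   the form

     add_acc_1 = PHI <0 (entry edge), add_acc_2 (back edge)>

   where the back-edge argument is added later by adjust_accumulator_values
   through add_successor_phi_arg (the SSA version numbers here are made
   up).  */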

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
	 statement.  */
      stmt = last_stmt (e->src);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_RETURN)
	find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
	continue;

      if (!phis_constructed)
	{
	  /* Ensure that there is only one predecessor of the block
	     and that it contains no existing phi nodes; otherwise
	     split the entry edge.  */
	  if (!single_pred_p (first)
	      || !gimple_seq_empty_p (phi_nodes (first)))
	    first =
	      split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

	  /* Copy the args if needed.  */
	  for (param = DECL_ARGUMENTS (current_function_decl);
	       param;
	       param = DECL_CHAIN (param))
	    if (arg_needs_copy_p (param))
	      {
		tree name = ssa_default_def (cfun, param);
		tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
		gphi *phi;

		set_ssa_default_def (cfun, param, new_name);
		phi = create_phi_node (name, first);
		add_phi_arg (phi, new_name, single_pred_edge (first),
			     EXPR_LOCATION (param));
	      }
	  phis_constructed = true;
	}

      if (act->add && !a_acc)
	a_acc = create_tailcall_accumulator ("add_acc", first,
					     integer_zero_node);

      if (act->mult && !m_acc)
	m_acc = create_tailcall_accumulator ("mult_acc", first,
					     integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
	 statements adding the accumulated value are inserted at all exits.
	 This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  stmt = last_stmt (e->src);

	  if (stmt
	      && gimple_code (stmt) == GIMPLE_RETURN)
	    adjust_return_value (e->src, m_acc, a_acc);
	}
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

namespace {

const pass_data pass_data_tail_recursion =
{
  GIMPLE_PASS, /* type */
  "tailr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_recursion : public gimple_opt_pass
{
public:
  pass_tail_recursion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_recursion, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tail_recursion (m_ctxt); }
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *)
    {
      return tree_optimize_tail_calls_1 (false);
    }

}; // class pass_tail_recursion

} // anon namespace

gimple_opt_pass *
make_pass_tail_recursion (gcc::context *ctxt)
{
  return new pass_tail_recursion (ctxt);
}

namespace {

const pass_data pass_data_tail_calls =
{
  GIMPLE_PASS, /* type */
  "tailc", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_tail_calls : public gimple_opt_pass
{
public:
  pass_tail_calls (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tail_calls, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_tail_calls (); }
  virtual unsigned int execute (function *) { return execute_tail_calls (); }

}; // class pass_tail_calls

} // anon namespace

gimple_opt_pass *
make_pass_tail_calls (gcc::context *ctxt)
{
  return new pass_tail_calls (ctxt);
}