/* gcc/tree-stdarg.c  */
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "langhooks.h"
29 #include "gimple-pretty-print.h"
30 #include "target.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-stdarg.h"
34
35 /* A simple pass that attempts to optimize stdarg functions on architectures
36 that need to save register arguments to stack on entry to stdarg functions.
37 If the function doesn't use any va_start macros, no registers need to
38 be saved. If va_start macros are used, the va_list variables don't escape
39 the function, it is only necessary to save registers that will be used
40 in va_arg macros. E.g. if va_arg is only used with integral types
41 in the function, floating point registers don't need to be saved, etc. */
42
43
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  VEC (edge, heap) *stack = NULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  /* The same block trivially satisfies the property.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_START_BB doesn't dominate VA_ARG_BB, some path reaches
     VA_ARG_BB without passing through VA_START_BB at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  sbitmap_zero (visited);
  ret = true;

  /* Walk the CFG backwards from VA_ARG_BB using an explicit worklist
     of predecessor edges, stopping each path at VA_START_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    VEC_safe_push (edge, heap, stack, e);

  while (! VEC_empty (edge, stack))
    {
      basic_block src;

      e = VEC_pop (edge, stack);
      src = e->src;

      /* Abnormal/EH edges make the execution count unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* This path is accounted for by a va_start execution.  */
      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees every backward path hits VA_START_BB
	 before the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! TEST_BIT (visited, src->index))
	{
	  SET_BIT (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    VEC_safe_push (edge, heap, stack, e);
	}
    }

  VEC_free (edge, heap, stack);
  sbitmap_free (visited);
  return ret;
}
106
107
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means
     "not computed yet" for each name.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;

  /* First walk: follow the SSA def chain from RHS back towards a load
     of COUNTER, accumulating the constant additions along the way
     into RET.  */
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;

      /* Reuse a previously memoized offset for this name.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      /* Look through plain copies and casts of SSA names.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	{
	  lhs = gimple_assign_rhs1 (stmt);
	  continue;
	}

      /* Accumulate X + CST steps.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = gimple_assign_rhs1 (stmt);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return (unsigned HOST_WIDE_INT) -1;

      /* The chain must terminate in a load of COUNTER itself (for
	 COMPONENT_REF counters, the same field of the same base).  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second walk: revisit the same chain and memoize each name's
     absolute counter value (saturated at MAX_SIZE) in si->offsets so
     later queries can stop early.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	{
	  lhs = gimple_assign_rhs1 (stmt);
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = gimple_assign_rhs1 (stmt);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
233
234
235 /* Called by walk_tree to look for references to va_list variables. */
236
237 static tree
238 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
239 void *data)
240 {
241 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
242 tree var = *tp;
243
244 if (TREE_CODE (var) == SSA_NAME)
245 var = SSA_NAME_VAR (var);
246
247 if (TREE_CODE (var) == VAR_DECL
248 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
249 return var;
250
251 return NULL_TREE;
252 }
253
254
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start, which is what makes
     counting register units meaningful.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* On a counter write, try to account the constant bump.  The
     "+ 1 > 1" test filters out both a zero bump and the -1 failure
     value of va_list_counter_bump.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative and assume all registers of this kind
     need saving (reads with compute_sizes set are harmless).  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
306
307
308 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
309 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
310 is false, AP has been seen in VAR = AP assignment.
311 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
312 va_arg operation that doesn't cause the va_list variable to escape
313 current function. */
314
315 static bool
316 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
317 bool write_p)
318 {
319 tree base;
320
321 if (TREE_CODE (ap) != COMPONENT_REF
322 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
323 return false;
324
325 if (TREE_CODE (var) != SSA_NAME
326 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
327 return false;
328
329 base = get_base_address (ap);
330 if (TREE_CODE (base) != VAR_DECL
331 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
332 return false;
333
334 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
335 va_list_counter_op (si, ap, var, true, write_p);
336 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
337 va_list_counter_op (si, ap, var, false, write_p);
338
339 return true;
340 }
341
342
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list variables.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
    return false;

  /* TEM must be a local SSA temporary, not itself a va_list variable
     and not backed by a global.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars,
		       DECL_UID (SSA_NAME_VAR (tem)))
      || is_global_var (SSA_NAME_VAR (tem)))
    return false;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* Make sure TEM's value can be expressed as AP plus a constant;
     this also memoizes its offset in si->offsets.  */
  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars,
		  DECL_UID (SSA_NAME_VAR (tem)));
  return true;
}
388
389
390 /* Check for:
391 tem1 = AP;
392 TEM2 = tem1 + CST;
393 AP = TEM2;
394 sequence and update cfun->va_list_gpr_size. Return true if found. */
395
396 static bool
397 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
398 {
399 unsigned HOST_WIDE_INT increment;
400
401 if (TREE_CODE (ap) != VAR_DECL
402 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
403 return false;
404
405 if (TREE_CODE (tem2) != SSA_NAME
406 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
407 return false;
408
409 if (si->compute_sizes <= 0)
410 return false;
411
412 increment = va_list_counter_bump (si, ap, tem2, true);
413 if (increment + 1 <= 1)
414 return false;
415
416 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
417 cfun->va_list_gpr_size += increment;
418 else
419 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
420
421 return true;
422 }
423
424
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued RHSes can carry a va_list value.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only interested in temporaries already known to hold a va_list
     derived value.  */
  if (TREE_CODE (rhs) != SSA_NAME
      || ! bitmap_bit_p (si->va_list_escape_vars,
			 DECL_UID (SSA_NAME_VAR (rhs))))
    return;

  /* Copying the value into anything but a local SSA temporary lets it
     escape.  */
  if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS's offset from the va_start pointer must be computable,
     otherwise treat it as an escape.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is a new va_list-derived temporary; keep tracking it.  */
  bitmap_set_bit (si->va_list_escape_vars,
		  DECL_UID (SSA_NAME_VAR (lhs)));
}
480
481
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements never cause real escapes.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      /* Only uses of the tracked va_list temporaries matter.  */
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  DECL_UID (SSA_NAME_VAR (use))))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;
		     A load through the temporary: account its offset
		     plus access size into va_list_gpr_size (saturating
		     at the maximum) and keep going.  */
		  if (gimple_assign_rhs_code (stmt) == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copies into another tracked temporary or back
			 into a va_list variable are fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   DECL_UID (SSA_NAME_VAR (lhs))))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary is a potential
		 escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
575
576
577 /* Return true if this optimization pass should be done.
578 It makes only sense for stdarg functions. */
579
580 static bool
581 gate_optimize_stdarg (void)
582 {
583 /* This optimization is only for stdarg functions. */
584 return cfun->stdarg != 0;
585 }
586
587
/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start out optimistic: no register save area needed.  The walks
     below only ever increase these counters.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* A "simple" va_list is a plain void * or char * pointer for this
     function's ABI.  */
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* First pass over the CFG: find every va_start call, record which
     local va_list variables it initializes, and bail out (va_list
     escapes) on anything unexpected.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The argument of va_start must be &ap or &ap[0] of a local
	     variable of the ABI va_list type.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap));

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  /* Dominator info is needed by reachable_at_most_once.  */
  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  /* Second pass: analyze every statement mentioning a va_list
     variable or a temporary derived from one, counting the register
     units consumed by recognized va_arg sequences.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* compute_sizes is (re)decided lazily per basic block.  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	    {
	      gimple phi = gsi_stmt (i);
	      lhs = PHI_RESULT (phi);

	      if (!is_gimple_reg (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for tem = ap.  */
		      if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap[0].field = temp.  */
		      if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify that none of the tracked derived temporaries
     escapes through some other use.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On any escape be conservative: save everything.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
902
903
/* Pass descriptor for the stdarg optimization, run on GIMPLE in SSA
   form with a valid CFG.  */

struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",				/* name */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};