re PR fortran/54107 ([F03] Memory hog with abstract interface)
[gcc.git] / gcc / tree-stdarg.c
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "function.h"
27 #include "langhooks.h"
28 #include "gimple-pretty-print.h"
29 #include "target.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-stdarg.h"
33
34 /* A simple pass that attempts to optimize stdarg functions on architectures
35 that need to save register arguments to stack on entry to stdarg functions.
36 If the function doesn't use any va_start macros, no registers need to
37 be saved. If va_start macros are used, the va_list variables don't escape
38 the function, it is only necessary to save registers that will be used
39 in va_arg macros. E.g. if va_arg is only used with integral types
40 in the function, floating point registers don't need to be saved, etc. */
41
42
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.

   This is decided by a reverse DFS from VA_ARG_BB's predecessors: every
   backward path must terminate at VA_START_BB without passing through a
   complex (abnormal/EH) edge and without re-reaching VA_ARG_BB, which
   would indicate VA_ARG_BB sits in a loop relative to VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  /* Worklist of CFG edges still to be examined.  */
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  /* Bitmap of basic block indices already expanded, to keep the walk
     linear in the size of the CFG.  */
  sbitmap visited;
  bool ret;

  /* Same block: trivially executed equally often.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_ARG_BB is not dominated by VA_START_BB, some path reaches it
     without executing va_start at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  bitmap_clear (visited);
  ret = true;

  /* Seed the worklist with all edges entering VA_ARG_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal or EH edges make the execution-count relationship
	 unknowable; give up conservatively.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* Reached va_start's block: this backward path is fine.  */
      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees we must hit VA_START_BB before the entry
	 block on any backward walk.  */
      gcc_assert (src != ENTRY_BLOCK_PTR);

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
105
106
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
   GPR_P is true if this is GPR counter.

   The walk follows the SSA def chain of RHS backwards through copies,
   casts and constant additions until it either reaches COUNTER itself
   (accumulating the constants into the returned bump) or hits a
   previously computed offset cached in SI->offsets.  A second pass then
   records the absolute offset of every SSA name visited, so later
   queries starting anywhere on the same chain terminate early.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks "not yet
     computed".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk from RHS back towards COUNTER, summing constant
     increments into RET.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* A cached offset lets us stop without reaching COUNTER: adjust
	 RET by the difference between the current counter value and the
	 cached absolute offset of this SSA name.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return (unsigned HOST_WIDE_INT) -1;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain SSA copy or cast: follow through without changing RET.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: accumulate the constant bump.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[ssa + CST]: same thing expressed as an address
	 computation.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
	{
	  ret += tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return (unsigned HOST_WIDE_INT) -1;

      /* At the root of the chain the RHS must be COUNTER itself (or the
	 same COMPONENT_REF field of the same base object).  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return (unsigned HOST_WIDE_INT) -1;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return (unsigned HOST_WIDE_INT) -1;
	}
      else if (counter != rhs)
	return (unsigned HOST_WIDE_INT) -1;

      lhs = NULL;
    }

  /* Second pass: retrace the same chain and record the absolute offset
     (clamped to MAX_SIZE) of each SSA name in the cache.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* Mirror the first pass, subtracting constants as we move back
	 towards COUNTER so VAL stays the offset at each name.  */
      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && host_integerp (gimple_assign_rhs2 (stmt), 1))
	{
	  val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && host_integerp (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1))
	{
	  val -= tree_low_cst (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1), 1);
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
256
257
258 /* Called by walk_tree to look for references to va_list variables. */
259
260 static tree
261 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
262 void *data)
263 {
264 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
265 tree var = *tp;
266
267 if (TREE_CODE (var) == SSA_NAME)
268 {
269 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
270 return var;
271 }
272 else if (TREE_CODE (var) == VAR_DECL)
273 {
274 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
275 return var;
276 }
277
278 return NULL_TREE;
279 }
280
281
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block, compute_sizes starts at -1)
     whether this block runs at most once per va_start, which is the
     precondition for counting register units precisely.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* On a counter write with a computable bump, accumulate it.  The
     "+ 1 > 1" test rejects both a zero bump and the
     (unsigned HOST_WIDE_INT) -1 failure value at once.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: an unanalyzable write, or any access in
     a block we could not prove runs once, forces saving everything.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
333
334
335 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
336 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
337 is false, AP has been seen in VAR = AP assignment.
338 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
339 va_arg operation that doesn't cause the va_list variable to escape
340 current function. */
341
342 static bool
343 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
344 bool write_p)
345 {
346 tree base;
347
348 if (TREE_CODE (ap) != COMPONENT_REF
349 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
350 return false;
351
352 if (TREE_CODE (var) != SSA_NAME
353 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
354 return false;
355
356 base = get_base_address (ap);
357 if (TREE_CODE (base) != VAR_DECL
358 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
359 return false;
360
361 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
362 va_list_counter_op (si, ap, var, true, write_p);
363 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
364 va_list_counter_op (si, ap, var, false, write_p);
365
366 return true;
367 }
368
369
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list decls.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an ordinary SSA temporary, not a va_list itself.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide (once per basic block, compute_sizes starts at -1)
     whether this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The read is only recognized if the pointer value is AP plus a
     computable constant offset.  */
  if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
413
414
415 /* Check for:
416 tem1 = AP;
417 TEM2 = tem1 + CST;
418 AP = TEM2;
419 sequence and update cfun->va_list_gpr_size. Return true if found. */
420
421 static bool
422 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
423 {
424 unsigned HOST_WIDE_INT increment;
425
426 if (TREE_CODE (ap) != VAR_DECL
427 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
428 return false;
429
430 if (TREE_CODE (tem2) != SSA_NAME
431 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
432 return false;
433
434 if (si->compute_sizes <= 0)
435 return false;
436
437 increment = va_list_counter_bump (si, ap, tem2, true);
438 if (increment + 1 <= 1)
439 return false;
440
441 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
442 cfun->va_list_gpr_size += increment;
443 else
444 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
445
446 return true;
447 }
448
449
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued assignments can propagate a va_list value.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must be (or be based on) an already-tracked escape temporary;
     anything else is of no interest here.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM_REF[ptr + CST] form: the base pointer is what matters.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a tracked value into anything but an SSA temporary (memory,
     a decl, ...) means we lose track of it: the va_list escapes.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide (once per basic block, compute_sizes starts at -1)
     whether this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* The new value must be the va_start pointer plus a computable
     constant; otherwise treat it as an escape.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == (unsigned HOST_WIDE_INT) -1)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS now also holds a va_list-derived value; track its uses.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
515
516
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.

   Every use of a tracked temporary must match one of the statement
   shapes a gimplified VA_ARG_EXPR produces; any other use is reported
   as a possible escape.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements never cause real escapes.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      /* Only tracked va_list-derived temporaries matter.  */
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp; */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* A load through a tracked pointer: bump the GPR
			 save size to cover cached offset + MEM_REF
			 offset + access size, clamped at the max.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
				 + tree_low_cst (access_size, 1);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copies into another tracked temporary are fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* As is storing back into a tracked va_list decl.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* other_ap_temp = &MEM_REF[ap_temp + CST]: fine if
			 the result is also tracked.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* No recognized shape matched: report the escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
620
621
622 /* Return true if this optimization pass should be done.
623 It makes only sense for stdarg functions. */
624
625 static bool
626 gate_optimize_stdarg (void)
627 {
628 /* This optimization is only for stdarg functions. */
629 return cfun->stdarg != 0;
630 }
631
632
/* Entry point to the stdarg optimization pass.

   Phase 1 scans for va_start calls and records the affected va_list
   decls; phase 2 runs a series of quick bail-out checks; phase 3 walks
   every statement (and, for simple-pointer va_lists, every PHI)
   classifying each use of a va_list as a recognized va_arg pattern or
   an escape.  On any escape both register save sizes are forced to
   their maxima; otherwise the sizes accumulated along the way stand.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start optimistic: assume no registers need saving.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* A "simple pointer" va_list (void * or char *) gets different
     handling from the struct-based ABI variants.  */
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* Phase 1: find all va_start calls and register their va_list
     decls in si.va_list_vars.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The argument must be the address of a local va_list decl
	     (possibly via element 0 of an array-typed va_list);
	     anything else is treated as an escape.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  /* Track the decl; decls live at DECL_UID offset past the SSA
	     name space in the bitmap.  */
	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  /* Phase 3: classify every statement touching a va_list.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* Recomputed lazily per block by the helpers (-1 = unknown).  */
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	    {
	      gimple phi = gsi_stmt (i);
	      lhs = PHI_RESULT (phi);

	      if (virtual_operand_p (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify that none of the tracked temporaries leak.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On escape, fall back to saving all argument registers.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
955
956
/* Pass descriptor for the stdarg optimization, registered with the
   pass manager.  Requires CFG and SSA form; provides and destroys
   nothing.  */

struct gimple_opt_pass pass_stdarg =
{
 {
  GIMPLE_PASS,
  "stdarg",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_optimize_stdarg,			/* gate */
  execute_optimize_stdarg,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};