cond.md (stzx_16): Use register_operand for operand 0.
[gcc.git] / gcc / tree-stdarg.c
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "function.h"
27 #include "langhooks.h"
28 #include "gimple-pretty-print.h"
29 #include "target.h"
30 #include "bitmap.h"
31 #include "gimple.h"
32 #include "gimple-iterator.h"
33 #include "gimple-walk.h"
34 #include "gimple-ssa.h"
35 #include "tree-phinodes.h"
36 #include "ssa-iterators.h"
37 #include "stringpool.h"
38 #include "tree-ssanames.h"
39 #include "sbitmap.h"
40 #include "tree-pass.h"
41 #include "tree-stdarg.h"
42
43 /* A simple pass that attempts to optimize stdarg functions on architectures
44 that need to save register arguments to stack on entry to stdarg functions.
45 If the function doesn't use any va_start macros, no registers need to
46 be saved. If va_start macros are used, the va_list variables don't escape
47 the function, it is only necessary to save registers that will be used
48 in va_arg macros. E.g. if va_arg is only used with integral types
49 in the function, floating point registers don't need to be saved, etc. */
50
51
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_START_BB doesn't dominate VA_ARG_BB there is a path to
     VA_ARG_BB that bypasses the va_start, so the answer is no.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block);
  bitmap_clear (visited);
  ret = true;

  /* Depth-first walk backwards over predecessor edges, stopping each
     path when it reaches VA_START_BB.  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal/EH edges make the execution count unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Because of the dominance check above, every backwards path must
	 hit VA_START_BB before the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
114
115
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks entries
     that have not been computed yet.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the use-def chain from RHS back towards a read of
     COUNTER, accumulating the constant increment into RET.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* If the offset of this SSA name from COUNTER is already cached,
	 use it instead of walking further.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Look through plain copies and casts.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: add CST and keep walking from rhs1.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[ptr + CST]: equivalent to ptr + CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must terminate in a load of COUNTER itself; anything
	 else means the bump is not a recognizable constant.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Must be the same field of the same underlying object.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: record the offset-from-COUNTER for every SSA name on
     the chain, so subsequent queries hit the cache checked above.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Saturate cached offsets at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
265
266
267 /* Called by walk_tree to look for references to va_list variables. */
268
269 static tree
270 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
271 void *data)
272 {
273 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
274 tree var = *tp;
275
276 if (TREE_CODE (var) == SSA_NAME)
277 {
278 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
279 return var;
280 }
281 else if (TREE_CODE (var) == VAR_DECL)
282 {
283 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
284 return var;
285 }
286
287 return NULL_TREE;
288 }
289
290
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     exact sizes can be computed: only with a single va_start and this
     block executing at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* On a counter write, account for the exact bump.  The "+ 1 > 1"
     test rejects both a zero bump and the HOST_WIDE_INT_M1U failure
     value in one unsigned comparison.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative and assume the whole save area is
     needed for this register class.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
342
343
344 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
345 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
346 is false, AP has been seen in VAR = AP assignment.
347 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
348 va_arg operation that doesn't cause the va_list variable to escape
349 current function. */
350
351 static bool
352 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
353 bool write_p)
354 {
355 tree base;
356
357 if (TREE_CODE (ap) != COMPONENT_REF
358 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
359 return false;
360
361 if (TREE_CODE (var) != SSA_NAME
362 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
363 return false;
364
365 base = get_base_address (ap);
366 if (TREE_CODE (base) != VAR_DECL
367 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
368 return false;
369
370 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
371 va_list_counter_op (si, ap, var, true, write_p);
372 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
373 va_list_counter_op (si, ap, var, false, write_p);
374
375 return true;
376 }
377
378
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list decls.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an SSA temporary that is not itself tracked as a
     va_list variable.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide whether exact sizes can be computed for this basic
     block (same scheme as in va_list_counter_op).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
422
423
424 /* Check for:
425 tem1 = AP;
426 TEM2 = tem1 + CST;
427 AP = TEM2;
428 sequence and update cfun->va_list_gpr_size. Return true if found. */
429
430 static bool
431 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
432 {
433 unsigned HOST_WIDE_INT increment;
434
435 if (TREE_CODE (ap) != VAR_DECL
436 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
437 return false;
438
439 if (TREE_CODE (tem2) != SSA_NAME
440 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
441 return false;
442
443 if (si->compute_sizes <= 0)
444 return false;
445
446 increment = va_list_counter_bump (si, ap, tem2, true);
447 if (increment + 1 <= 1)
448 return false;
449
450 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
451 cfun->va_list_gpr_size += increment;
452 else
453 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
454
455 return true;
456 }
457
458
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only statements whose RHS is (derived from) one of the tracked
     escape temporaries are interesting.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing the value into anything other than a local SSA temporary
     counts as an escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide whether exact sizes can be computed for this basic
     block (same scheme as in va_list_counter_op).  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is yet another temporary holding the va_list value; track its
     uses too.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
524
525
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      /* A PHI whose result is not itself a tracked temporary but that
	 receives a tracked temporary as an argument lets the va_list
	 value flow somewhere we don't model -- treat it as an escape.  */
      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gimple phi = gsi_stmt (i);

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements don't affect code generation.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* A load through the temporary: bump the GPR save
			 size by the end offset of the access, saturating
			 at the maximum.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copies into other tracked temporaries or back
			 into a tracked va_list decl are fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary is a potential
		 escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
660
661
662 /* Return true if this optimization pass should be done.
663 It makes only sense for stdarg functions. */
664
665 static bool
666 gate_optimize_stdarg (void)
667 {
668 /* This optimization is only for stdarg functions. */
669 return cfun->stdarg != 0;
670 }
671
672
/* Entry point to the stdarg optimization pass.  */

static unsigned int
execute_optimize_stdarg (void)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start from "no registers need saving"; the analysis below raises
     the counts as needed, or sets va_list_escapes to force saving
     everything.  */
  cfun->va_list_gpr_size = 0;
  cfun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* A "simple pointer" va_list is just a void */char * cursor; the
     alternative is a target-specific struct with counter fields.  */
  cfun_va_list = targetm.fn_abi_va_list (cfun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* First walk: find every va_start call and record the local va_list
     decl it initializes; anything unusual makes the va_list escape.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The argument of va_start must be &ap, possibly with an
	     intervening zero-index ARRAY_REF for array-typed va_list.  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (cfun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  /* Second walk: classify every statement that touches a va_list
     variable or a temporary derived from one.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  */
      if (va_list_simple_ptr)
	{
	  tree lhs, rhs;
	  use_operand_p uop;
	  ssa_op_iter soi;

	  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	    {
	      gimple phi = gsi_stmt (i);
	      lhs = PHI_RESULT (phi);

	      if (virtual_operand_p (lhs))
		continue;

	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }
	}

      for (i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Finally verify that none of the tracked temporaries leaks.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* On escape, be conservative and save the whole register save area.  */
  if (va_list_escapes)
    {
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
995
996
namespace {

/* Pass descriptor for the stdarg pass.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper dispatching to gate_optimize_stdarg and
   execute_optimize_stdarg above.  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_optimize_stdarg (); }
  unsigned int execute () { return execute_optimize_stdarg (); }

}; // class pass_stdarg

} // anon namespace

/* Factory function used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}