/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "tree-into-ssa.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-stdarg.h"

/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used and the va_list variables don't
   escape the function, it is only necessary to save registers that will be
   used in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
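
/* For illustration, the kind of user code this pass helps (a
   hypothetical example, not part of GCC):

     #include <stdarg.h>

     int
     sum_ints (int count, ...)
     {
       va_list ap;
       int i, total = 0;

       va_start (ap, count);
       for (i = 0; i < count; i++)
         total += va_arg (ap, int);    <-- only integral va_arg uses
       va_end (ap);
       return total;
     }

   On targets with a register save area (e.g. x86_64), a generic
   prologue would also spill every floating point argument register;
   since va_arg is never used with a floating point type here, this
   pass lets the target omit the FPR part of the save area.  */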


/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
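
/* Intuition for the walk above, with hypothetical user code:

     va_start (ap, n);            <-- va_start_bb
     while (cond)
       x = va_arg (ap, int);      <-- va_arg_bb, inside a loop

   va_arg_bb is dominated by va_start_bb, yet the backward walk over
   its predecessor edges reaches va_arg_bb itself through the loop back
   edge, so the function returns false: the va_arg may execute many
   times per va_start.  */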


/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is the GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple *stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
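
/* Illustrative def chain for the walk above (simplified GIMPLE,
   hypothetical SSA names and field name):

     tmp_1 = ap.gp_offset;       <-- load of COUNTER
     tmp_2 = tmp_1 + 8;          <-- PLUS_EXPR with a constant
     ap.gp_offset = tmp_2;       <-- the COUNTER = RHS being analyzed

   Called with counter == ap.gp_offset and rhs == tmp_2, the first loop
   walks tmp_2 -> tmp_1, accumulating the constant 8, recognizes tmp_1
   as a load of the same COMPONENT_REF, and returns 8.  The second loop
   then caches the running counter value for each SSA name visited in
   si->offsets, so later queries over the same chain stop early.  */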


/* Called by walk_tree to look for references to va_list variables.  */

static tree
find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  tree var = *tp;

  if (TREE_CODE (var) == SSA_NAME)
    {
      if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
	return var;
    }
  else if (TREE_CODE (var) == VAR_DECL)
    {
      if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
	return var;
    }

  return NULL_TREE;
}


/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter;
   if WRITE_P is true, it was seen in an AP = VAR statement, otherwise
   in a VAR = AP statement.  GPR_P is true if AP is a GPR counter,
   false if it is an FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
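
/* With -fdump-tree-stdarg-details, the lazy check above emits one line
   per queried basic block, e.g. (block numbers hypothetical):

     bb4 will be executed at most once for each va_start in bb2

   When the answer is "not", compute_sizes stays 0 and any counter
   write conservatively pushes the corresponding save-area size to its
   maximum.  */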


/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in an AP = VAR assignment; if
   WRITE_P is false, AP has been seen in a VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   the current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}


/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}


/* Check for:
     tem1 = AP;
     TEM2 = tem1 + CST;
     AP = TEM2;
   sequence and update cfun->va_list_gpr_size.  Return true if found.  */

static bool
va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
{
  unsigned HOST_WIDE_INT increment;

  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  if (TREE_CODE (tem2) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
    return false;

  if (si->compute_sizes <= 0)
    return false;

  increment = va_list_counter_bump (si, ap, tem2, true);
  if (increment + 1 <= 1)
    return false;

  if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
    cfun->va_list_gpr_size += increment;
  else
    cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;

  return true;
}
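
/* For a simple-pointer va_list (void * or char *), a single
   va_arg (ap, int) typically gimplifies to a sequence like this
   (simplified GIMPLE, hypothetical SSA names):

     tem1_1 = ap;                      <-- matched by va_list_ptr_read
     tem2_2 = tem1_1 + 4;
     ap = tem2_2;                      <-- matched by va_list_ptr_write
     x_3 = MEM[(int *) tem1_1];

   va_list_ptr_read registers tem1_1 in va_list_escape_vars so that its
   later uses can be audited, and va_list_ptr_write credits the
   constant increment (4 here) to cfun->va_list_gpr_size.  */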


/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending on whether LHS is a function-local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
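
/* Example of an escape caught here (hypothetical user code, with a
   char * va_list):

     va_list ap;
     void *p;
     va_start (ap, n);
     p = ap;

   In GIMPLE the last statement becomes roughly tem_1 = ap; p = tem_1;
   va_list_ptr_read marks tem_1 as a tracked temporary, and the store
   into p, which is not an SSA name, then sets si->va_list_escapes.  */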


/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		       other_ap_temp = ap_temp;
		       other_ap_temp = ap_temp + constant;
		       other_ap_temp = (some_type *) ap_temp;
		       ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
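
/* Worked example for the MEM_REF case above (hypothetical numbers):
   if si->offsets for ap_temp is 8, the load is x = MEM[ap_temp + 4]
   and the accessed type is 4 bytes wide, then

     gpr_size = 8 + 4 + 4 = 16

   so at least 16 GPR units of the register save area must be
   preserved for this dereference.  */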

/* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size.  */

static void
optimize_va_list_gpr_fpr_size (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will only be used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter
     fields are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			   tem1 = ap;
			   tem2 = tem1 + CST;
			   ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", fun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", fun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
}
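
/* With -fdump-tree-stdarg, the summary above prints one line per
   function; for the earlier sum_ints illustration it would look
   roughly like:

     sum_ints: va_list escapes 0, needs to save all GPR units and 0 FPR units.

   (va_arg in a loop defeats the exact GPR bound, so "all" GPR units
   are saved, but the FPR part of the save area is still elided.)  */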

/* Return true if STMT is IFN_VA_ARG.  */

static bool
gimple_call_ifn_va_arg_p (gimple *stmt)
{
  return (is_gimple_call (stmt)
	  && gimple_call_internal_p (stmt)
	  && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
}

/* Expand IFN_VA_ARGs in FUN.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;
  location_t saved_location;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	tree ap, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_ifn_va_arg_p (stmt))
	  continue;

	modified = true;

	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);

	/* Balance out the &ap, usually added by build_va_arg.  */
	ap = build_fold_indirect_ref (ap);

	push_gimplify_context (false);
	saved_location = input_location;
	input_location = gimple_location (stmt);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 3)
	      {
		/* We've transported the size of the WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimplify_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

	input_location = saved_location;
	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   in between.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	unlink_stmt_vdef (stmt);
	release_ssa_name_fn (fun, gimple_vdef (stmt));
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
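
/* Illustrative effect of the expansion above, in simplified GIMPLE
   with hypothetical SSA names.  A gimplified va_arg such as

     x_5 = VA_ARG (&ap, ...);

   is replaced by whatever targetm.gimplify_va_arg_expr produces; on a
   target with a simple char * va_list this is roughly

     tmp_6 = ap;
     ap = tmp_6 + 4;
     x_5 = MEM[(int *) tmp_6];

   emitted into one or more new basic blocks spliced in right after the
   original statement by gimple_find_sub_bbs.  */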

/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#if ENABLE_CHECKING
  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
}

namespace {

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, e.g. when passing a va_list to
       another function.  */
    return true;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}
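
/* Note that the save-area size optimization above is gated on
   flag_stdarg_opt (the -fstdarg-opt option), while the IFN_VA_ARG
   expansion always runs, because later passes rely on the
   PROP_gimple_lva property this pass provides.  */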

} // anon namespace

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}

namespace {

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return (cfun->curr_properties & PROP_gimple_lva) == 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}