See <https://gcc.gnu.org/ml/gcc-patches/2015-05/msg01977.html> for the
mailing-list discussion of this version of gcc/tree-stdarg.c.
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "hard-reg-set.h"
37 #include "input.h"
38 #include "function.h"
39 #include "langhooks.h"
40 #include "gimple-pretty-print.h"
41 #include "target.h"
42 #include "bitmap.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-expr.h"
50 #include "is-a.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimple-walk.h"
54 #include "gimple-ssa.h"
55 #include "gimplify.h"
56 #include "tree-phinodes.h"
57 #include "ssa-iterators.h"
58 #include "stringpool.h"
59 #include "tree-ssanames.h"
60 #include "tree-into-ssa.h"
61 #include "sbitmap.h"
62 #include "tree-cfg.h"
63 #include "tree-pass.h"
64 #include "tree-stdarg.h"
65
66 /* A simple pass that attempts to optimize stdarg functions on architectures
67 that need to save register arguments to stack on entry to stdarg functions.
68 If the function doesn't use any va_start macros, no registers need to
69 be saved. If va_start macros are used, the va_list variables don't escape
70 the function, it is only necessary to save registers that will be used
71 in va_arg macros. E.g. if va_arg is only used with integral types
72 in the function, floating point registers don't need to be saved, etc. */
73
74
75 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
76 is executed at most as many times as VA_START_BB. */
77
78 static bool
79 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
80 {
81 vec<edge> stack = vNULL;
82 edge e;
83 edge_iterator ei;
84 sbitmap visited;
85 bool ret;
86
87 if (va_arg_bb == va_start_bb)
88 return true;
89
90 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
91 return false;
92
93 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
94 bitmap_clear (visited);
95 ret = true;
96
97 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
98 stack.safe_push (e);
99
100 while (! stack.is_empty ())
101 {
102 basic_block src;
103
104 e = stack.pop ();
105 src = e->src;
106
107 if (e->flags & EDGE_COMPLEX)
108 {
109 ret = false;
110 break;
111 }
112
113 if (src == va_start_bb)
114 continue;
115
116 /* va_arg_bb can be executed more times than va_start_bb. */
117 if (src == va_arg_bb)
118 {
119 ret = false;
120 break;
121 }
122
123 gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
124
125 if (! bitmap_bit_p (visited, src->index))
126 {
127 bitmap_set_bit (visited, src->index);
128 FOR_EACH_EDGE (e, ei, src->preds)
129 stack.safe_push (e);
130 }
131 }
132
133 stack.release ();
134 sbitmap_free (visited);
135 return ret;
136 }
137
138
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 means "not yet
     computed".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain backwards from RHS, summing the
     constant offsets into RET, until we either hit a read of COUNTER
     itself or an SSA name whose offset was cached by an earlier call.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Cached offset available: combine it with the running total and
	 stop the walk.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copies and casts: step through without changing RET.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: accumulate CST and keep walking.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[name_N + CST]: likewise.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The walk must terminate in a read of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* For counters that are struct fields, both the base object
	     and the field must match.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: walk the same chain again and record in SI->offsets the
     counter value each intermediate SSA name corresponds to, so future
     bumps can stop at the cache instead of re-walking.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      /* Saturate the cached offset at MAX_SIZE.  */
      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* Mirror the first pass, but subtract: VAL tracks the counter
	 value at each point going backwards.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
288
289
290 /* Called by walk_tree to look for references to va_list variables. */
291
292 static tree
293 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
294 void *data)
295 {
296 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
297 tree var = *tp;
298
299 if (TREE_CODE (var) == SSA_NAME)
300 {
301 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
302 return var;
303 }
304 else if (TREE_CODE (var) == VAR_DECL)
305 {
306 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
307 return var;
308 }
309
310 return NULL_TREE;
311 }
312
313
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Compute (lazily, once per basic block) whether SI->bb runs at most
     once per va_start; only then can counter bumps be summed safely.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* The unsigned "increment + 1 > 1" test rejects both a failed walk
     (HOST_WIDE_INT_M1U wraps to 0) and a zero increment.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: assume all registers of that class are
     needed.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
365
366
367 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
368 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
369 is false, AP has been seen in VAR = AP assignment.
370 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
371 va_arg operation that doesn't cause the va_list variable to escape
372 current function. */
373
374 static bool
375 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
376 bool write_p)
377 {
378 tree base;
379
380 if (TREE_CODE (ap) != COMPONENT_REF
381 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
382 return false;
383
384 if (TREE_CODE (var) != SSA_NAME
385 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
386 return false;
387
388 base = get_base_address (ap);
389 if (TREE_CODE (base) != VAR_DECL
390 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
391 return false;
392
393 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
394 va_list_counter_op (si, ap, var, true, write_p);
395 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
396 va_list_counter_op (si, ap, var, false, write_p);
397
398 return true;
399 }
400
401
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list variables...  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* ... and TEM an SSA temporary that isn't itself a va_list var.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Compute (lazily, once per basic block) whether SI->bb runs at most
     once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The read must be expressible as AP plus a known constant.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
445
446
447 /* Check for:
448 tem1 = AP;
449 TEM2 = tem1 + CST;
450 AP = TEM2;
451 sequence and update cfun->va_list_gpr_size. Return true if found. */
452
453 static bool
454 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
455 {
456 unsigned HOST_WIDE_INT increment;
457
458 if (TREE_CODE (ap) != VAR_DECL
459 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
460 return false;
461
462 if (TREE_CODE (tem2) != SSA_NAME
463 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
464 return false;
465
466 if (si->compute_sizes <= 0)
467 return false;
468
469 increment = va_list_counter_bump (si, ap, tem2, true);
470 if (increment + 1 <= 1)
471 return false;
472
473 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
474 cfun->va_list_gpr_size += increment;
475 else
476 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
477
478 return true;
479 }
480
481
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* Only interested in temporaries already known to carry a
	 va_list-derived value.  */
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM_REF[ptr + CST] is also a value derived from ptr.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing a va_list-derived value anywhere but an SSA temporary
     counts as an escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Compute (lazily, once per basic block) whether SI->bb runs at most
     once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS must be expressible as the va_start value plus a known
     constant; otherwise we lose track of it.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another temporary holding a va_list-derived value; track
     it too.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
547
548
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes: a tracked temporary feeding a PHI whose result is
	 not itself tracked means the value flows where we don't
	 follow it.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      /* Regular statements: every use of a tracked temporary must be
	 one of the recognized va_arg-sequence forms, otherwise the
	 va_list escapes.  Recognized forms `continue'; anything that
	 falls through to the end of the operand loop is an escape.  */
      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      /* The load reaches cached offset + MEM_REF
			 displacement + access size bytes into the
			 register save area.  */
		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary is a (potential)
		 escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
683
684 /* Optimize FUN->va_list_gpr_size and FUN->va_list_fpr_size. */
685
686 static void
687 optimize_va_list_gpr_fpr_size (function *fun)
688 {
689 basic_block bb;
690 bool va_list_escapes = false;
691 bool va_list_simple_ptr;
692 struct stdarg_info si;
693 struct walk_stmt_info wi;
694 const char *funcname = NULL;
695 tree cfun_va_list;
696
697 fun->va_list_gpr_size = 0;
698 fun->va_list_fpr_size = 0;
699 memset (&si, 0, sizeof (si));
700 si.va_list_vars = BITMAP_ALLOC (NULL);
701 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
702
703 if (dump_file)
704 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
705
706 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
707 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
708 && (TREE_TYPE (cfun_va_list) == void_type_node
709 || TREE_TYPE (cfun_va_list) == char_type_node);
710 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
711
712 FOR_EACH_BB_FN (bb, fun)
713 {
714 gimple_stmt_iterator i;
715
716 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
717 {
718 gimple stmt = gsi_stmt (i);
719 tree callee, ap;
720
721 if (!is_gimple_call (stmt))
722 continue;
723
724 callee = gimple_call_fndecl (stmt);
725 if (!callee
726 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
727 continue;
728
729 switch (DECL_FUNCTION_CODE (callee))
730 {
731 case BUILT_IN_VA_START:
732 break;
733 /* If old style builtins are used, don't optimize anything. */
734 case BUILT_IN_SAVEREGS:
735 case BUILT_IN_NEXT_ARG:
736 va_list_escapes = true;
737 continue;
738 default:
739 continue;
740 }
741
742 si.va_start_count++;
743 ap = gimple_call_arg (stmt, 0);
744
745 if (TREE_CODE (ap) != ADDR_EXPR)
746 {
747 va_list_escapes = true;
748 break;
749 }
750 ap = TREE_OPERAND (ap, 0);
751 if (TREE_CODE (ap) == ARRAY_REF)
752 {
753 if (! integer_zerop (TREE_OPERAND (ap, 1)))
754 {
755 va_list_escapes = true;
756 break;
757 }
758 ap = TREE_OPERAND (ap, 0);
759 }
760 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
761 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
762 || TREE_CODE (ap) != VAR_DECL)
763 {
764 va_list_escapes = true;
765 break;
766 }
767
768 if (is_global_var (ap))
769 {
770 va_list_escapes = true;
771 break;
772 }
773
774 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
775
776 /* VA_START_BB and VA_START_AP will be only used if there is just
777 one va_start in the function. */
778 si.va_start_bb = bb;
779 si.va_start_ap = ap;
780 }
781
782 if (va_list_escapes)
783 break;
784 }
785
786 /* If there were no va_start uses in the function, there is no need to
787 save anything. */
788 if (si.va_start_count == 0)
789 goto finish;
790
791 /* If some va_list arguments weren't local, we can't optimize. */
792 if (va_list_escapes)
793 goto finish;
794
795 /* For void * or char * va_list, something useful can be done only
796 if there is just one va_start. */
797 if (va_list_simple_ptr && si.va_start_count > 1)
798 {
799 va_list_escapes = true;
800 goto finish;
801 }
802
803 /* For struct * va_list, if the backend didn't tell us what the counter fields
804 are, there is nothing more we can do. */
805 if (!va_list_simple_ptr
806 && va_list_gpr_counter_field == NULL_TREE
807 && va_list_fpr_counter_field == NULL_TREE)
808 {
809 va_list_escapes = true;
810 goto finish;
811 }
812
813 /* For void * or char * va_list there is just one counter
814 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
815 if (va_list_simple_ptr)
816 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
817
818 calculate_dominance_info (CDI_DOMINATORS);
819 memset (&wi, 0, sizeof (wi));
820 wi.info = si.va_list_vars;
821
822 FOR_EACH_BB_FN (bb, fun)
823 {
824 si.compute_sizes = -1;
825 si.bb = bb;
826
827 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
828 them as assignments for the purpose of escape analysis. This is
829 not needed for non-simple va_list because virtual phis don't perform
830 any real data movement. Also, check PHI nodes for taking address of
831 the va_list vars. */
832 tree lhs, rhs;
833 use_operand_p uop;
834 ssa_op_iter soi;
835
836 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
837 gsi_next (&i))
838 {
839 gphi *phi = i.phi ();
840 lhs = PHI_RESULT (phi);
841
842 if (virtual_operand_p (lhs))
843 continue;
844
845 if (va_list_simple_ptr)
846 {
847 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
848 {
849 rhs = USE_FROM_PTR (uop);
850 if (va_list_ptr_read (&si, rhs, lhs))
851 continue;
852 else if (va_list_ptr_write (&si, lhs, rhs))
853 continue;
854 else
855 check_va_list_escapes (&si, lhs, rhs);
856
857 if (si.va_list_escapes)
858 {
859 if (dump_file && (dump_flags & TDF_DETAILS))
860 {
861 fputs ("va_list escapes in ", dump_file);
862 print_gimple_stmt (dump_file, phi, 0, dump_flags);
863 fputc ('\n', dump_file);
864 }
865 va_list_escapes = true;
866 }
867 }
868 }
869
870 for (unsigned j = 0; !va_list_escapes
871 && j < gimple_phi_num_args (phi); ++j)
872 if ((!va_list_simple_ptr
873 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
874 && walk_tree (gimple_phi_arg_def_ptr (phi, j),
875 find_va_list_reference, &wi, NULL))
876 {
877 if (dump_file && (dump_flags & TDF_DETAILS))
878 {
879 fputs ("va_list escapes in ", dump_file);
880 print_gimple_stmt (dump_file, phi, 0, dump_flags);
881 fputc ('\n', dump_file);
882 }
883 va_list_escapes = true;
884 }
885 }
886
887 for (gimple_stmt_iterator i = gsi_start_bb (bb);
888 !gsi_end_p (i) && !va_list_escapes;
889 gsi_next (&i))
890 {
891 gimple stmt = gsi_stmt (i);
892
893 /* Don't look at __builtin_va_{start,end}, they are ok. */
894 if (is_gimple_call (stmt))
895 {
896 tree callee = gimple_call_fndecl (stmt);
897
898 if (callee
899 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
900 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
901 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
902 continue;
903 }
904
905 if (is_gimple_assign (stmt))
906 {
907 lhs = gimple_assign_lhs (stmt);
908 rhs = gimple_assign_rhs1 (stmt);
909
910 if (va_list_simple_ptr)
911 {
912 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
913 == GIMPLE_SINGLE_RHS)
914 {
915 /* Check for ap ={v} {}. */
916 if (TREE_CLOBBER_P (rhs))
917 continue;
918
919 /* Check for tem = ap. */
920 else if (va_list_ptr_read (&si, rhs, lhs))
921 continue;
922
923 /* Check for the last insn in:
924 tem1 = ap;
925 tem2 = tem1 + CST;
926 ap = tem2;
927 sequence. */
928 else if (va_list_ptr_write (&si, lhs, rhs))
929 continue;
930 }
931
932 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
933 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
934 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
935 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
936 == GIMPLE_SINGLE_RHS))
937 check_va_list_escapes (&si, lhs, rhs);
938 }
939 else
940 {
941 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
942 == GIMPLE_SINGLE_RHS)
943 {
944 /* Check for ap ={v} {}. */
945 if (TREE_CLOBBER_P (rhs))
946 continue;
947
948 /* Check for ap[0].field = temp. */
949 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
950 continue;
951
952 /* Check for temp = ap[0].field. */
953 else if (va_list_counter_struct_op (&si, rhs, lhs,
954 false))
955 continue;
956 }
957
958 /* Do any architecture specific checking. */
959 if (targetm.stdarg_optimize_hook
960 && targetm.stdarg_optimize_hook (&si, stmt))
961 continue;
962 }
963 }
964 else if (is_gimple_debug (stmt))
965 continue;
966
967 /* All other uses of va_list are either va_copy (that is not handled
968 in this optimization), taking address of va_list variable or
969 passing va_list to other functions (in that case va_list might
970 escape the function and therefore va_start needs to set it up
971 fully), or some unexpected use of va_list. None of these should
972 happen in a gimplified VA_ARG_EXPR. */
973 if (si.va_list_escapes
974 || walk_gimple_op (stmt, find_va_list_reference, &wi))
975 {
976 if (dump_file && (dump_flags & TDF_DETAILS))
977 {
978 fputs ("va_list escapes in ", dump_file);
979 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
980 fputc ('\n', dump_file);
981 }
982 va_list_escapes = true;
983 }
984 }
985
986 if (va_list_escapes)
987 break;
988 }
989
990 if (! va_list_escapes
991 && va_list_simple_ptr
992 && ! bitmap_empty_p (si.va_list_escape_vars)
993 && check_all_va_list_escapes (&si))
994 va_list_escapes = true;
995
996 finish:
997 if (va_list_escapes)
998 {
999 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1000 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1001 }
1002 BITMAP_FREE (si.va_list_vars);
1003 BITMAP_FREE (si.va_list_escape_vars);
1004 free (si.offsets);
1005 if (dump_file)
1006 {
1007 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1008 funcname, (int) va_list_escapes);
1009 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1010 fputs ("all", dump_file);
1011 else
1012 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1013 fputs (" GPR units and ", dump_file);
1014 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1015 fputs ("all", dump_file);
1016 else
1017 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1018 fputs (" FPR units.\n", dump_file);
1019 }
1020 }
1021
1022 /* Return true if STMT is IFN_VA_ARG. */
1023
1024 static bool
1025 gimple_call_ifn_va_arg_p (gimple stmt)
1026 {
1027 return (is_gimple_call (stmt)
1028 && gimple_call_internal_p (stmt)
1029 && gimple_call_internal_fn (stmt) == IFN_VA_ARG);
1030 }
1031
/* Expand IFN_VA_ARGs in FUN.  */

static void
expand_ifn_va_arg_1 (function *fun)
{
  bool modified = false;
  basic_block bb;
  gimple_stmt_iterator i;

  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	tree ap, expr, lhs, type;
	gimple_seq pre = NULL, post = NULL;

	if (!gimple_call_ifn_va_arg_p (stmt))
	  continue;

	modified = true;

	/* Arg 1 is a pointer to the requested type; recover the type.  */
	type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 1)));
	ap = gimple_call_arg (stmt, 0);

	/* Balanced out the &ap, usually added by build_va_arg.  */
	ap = build_fold_indirect_ref (ap);

	push_gimplify_context (false);

	/* Make it easier for the backends by protecting the valist argument
	   from multiple evaluations.  */
	gimplify_expr (&ap, &pre, &post, is_gimple_min_lval, fb_lvalue);

	/* Let the target expand the va_arg into gimple.  */
	expr = targetm.gimplify_va_arg_expr (ap, type, &pre, &post);

	lhs = gimple_call_lhs (stmt);
	if (lhs != NULL_TREE)
	  {
	    unsigned int nargs = gimple_call_num_args (stmt);
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (lhs), type));

	    if (nargs == 3)
	      {
		/* We've transported the size of with WITH_SIZE_EXPR here as
		   the last argument of the internal fn call.  Now reinstate
		   it.  */
		tree size = gimple_call_arg (stmt, nargs - 1);
		expr = build2 (WITH_SIZE_EXPR, TREE_TYPE (expr), expr, size);
	      }

	    /* We use gimplify_assign here, rather than gimple_build_assign,
	       because gimple_assign knows how to deal with variable-sized
	       types.  */
	    gimplify_assign (lhs, expr, &pre);
	  }
	else
	  /* Result unused: gimplify for side effects only.  */
	  gimplify_expr (&expr, &pre, &post, is_gimple_lvalue, fb_lvalue);

	pop_gimplify_context (NULL);

	gimple_seq_add_seq (&pre, post);
	update_modified_stmts (pre);

	/* Add the sequence after IFN_VA_ARG.  This splits the bb right
	   after IFN_VA_ARG, and adds the sequence in one or more new bbs
	   inbetween.  */
	gimple_find_sub_bbs (pre, &i);

	/* Remove the IFN_VA_ARG gimple_call.  It's the last stmt in the
	   bb.  */
	gsi_remove (&i, true);
	gcc_assert (gsi_end_p (i));

	/* We're walking here into the bbs which contain the expansion of
	   IFN_VA_ARG, and will not contain another IFN_VA_ARG that needs
	   expanding.  We could try to skip walking these bbs, perhaps by
	   walking backwards over gimples and bbs.  */
	break;
      }

  if (!modified)
    return;

  /* The CFG changed under us; rebuild SSA form and dominators.  */
  free_dominance_info (CDI_DOMINATORS);
  update_ssa (TODO_update_ssa);
}
1118
/* Expand IFN_VA_ARGs in FUN, if necessary.  */

static void
expand_ifn_va_arg (function *fun)
{
  /* Skip the work if va_arg lowering has already been performed.  */
  if ((fun->curr_properties & PROP_gimple_lva) == 0)
    expand_ifn_va_arg_1 (fun);

#if ENABLE_CHECKING
  /* In checking builds, verify that no IFN_VA_ARG calls remain.  */
  basic_block bb;
  gimple_stmt_iterator i;
  FOR_EACH_BB_FN (bb, fun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
}
1135
namespace {

/* Pass descriptor for the stdarg pass.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Always run this pass, in order to expand va_arg internal_fns.  We
       also need to do that if fun->stdarg == 0, because a va_arg may also
       occur in a function without varargs, f.i. if when passing a va_list to
       another function.  */
    return true;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg

/* Main entry point: lower IFN_VA_ARG calls, and for actual stdarg
   functions compute how many registers need saving on entry.  */

unsigned int
pass_stdarg::execute (function *fun)
{
  /* TODO: Postpone expand_ifn_va_arg till after
     optimize_va_list_gpr_fpr_size.  */
  expand_ifn_va_arg (fun);

  if (flag_stdarg_opt
      /* This optimization is only for stdarg functions.  */
      && fun->stdarg != 0)
    optimize_va_list_gpr_fpr_size (fun);

  return 0;
}

} // anon namespace
1188
/* Create an instance of the stdarg pass.  */

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}
1194
namespace {

/* Pass descriptor for the early va_arg lowering pass.  */

const pass_data pass_data_lower_vaarg =
{
  GIMPLE_PASS, /* type */
  "lower_vaarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_gimple_lva, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_vaarg : public gimple_opt_pass
{
public:
  pass_lower_vaarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_vaarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    /* Only run when va_arg lowering hasn't happened yet.  */
    return (cfun->curr_properties & PROP_gimple_lva) == 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_lower_vaarg

/* Lower any remaining IFN_VA_ARG calls; does no size optimization.  */

unsigned int
pass_lower_vaarg::execute (function *fun)
{
  expand_ifn_va_arg (fun);
  return 0;
}

} // anon namespace
1235
/* Create an instance of the lower_vaarg pass.  */

gimple_opt_pass *
make_pass_lower_vaarg (gcc::context *ctxt)
{
  return new pass_lower_vaarg (ctxt);
}