ipa-utils.c (ipa_merge_profiles): Avoid ICE on mismatch in indirect edges.
[gcc.git] / gcc / tree-stdarg.c
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "hard-reg-set.h"
37 #include "input.h"
38 #include "function.h"
39 #include "langhooks.h"
40 #include "gimple-pretty-print.h"
41 #include "target.h"
42 #include "bitmap.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-expr.h"
50 #include "is-a.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimple-walk.h"
54 #include "gimple-ssa.h"
55 #include "tree-phinodes.h"
56 #include "ssa-iterators.h"
57 #include "stringpool.h"
58 #include "tree-ssanames.h"
59 #include "sbitmap.h"
60 #include "tree-pass.h"
61 #include "tree-stdarg.h"
62
63 /* A simple pass that attempts to optimize stdarg functions on architectures
64 that need to save register arguments to stack on entry to stdarg functions.
65 If the function doesn't use any va_start macros, no registers need to
66 be saved. If va_start macros are used, the va_list variables don't escape
67 the function, it is only necessary to save registers that will be used
68 in va_arg macros. E.g. if va_arg is only used with integral types
69 in the function, floating point registers don't need to be saved, etc. */
70
71
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;	/* Worklist of predecessor edges to examine.  */
  edge e;
  edge_iterator ei;
  sbitmap visited;		/* Blocks already expanded, indexed by bb index.  */
  bool ret;

  /* Same block trivially executes equally often.  */
  if (va_arg_bb == va_start_bb)
    return true;

  /* If va_arg_bb is not dominated by va_start_bb, some path reaches it
     without passing through va_start_bb at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  /* Walk backwards from VA_ARG_BB over predecessor edges; every path must
     terminate at VA_START_BB (dominance guarantees we can't reach the
     entry block first, see the assert below).  */
  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal/EH edges make the execution-count relation unknowable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* Path closed off by va_start_bb: fine, stop walking this path.  */
      if (src == va_start_bb)
	continue;

      /* Reached va_arg_bb again, i.e. it sits in a cycle not containing
	 va_start_bb: va_arg_bb can be executed more times than
	 va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance ensures every backward path hits va_start_bb before
	 the entry block.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
134

/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.

   Works in two passes over the SSA use-def chain rooted at RHS:
   the first pass walks backwards accumulating the constant bump until it
   reaches COUNTER itself (or a previously memoized SSA name), the second
   pass walks the same chain again recording each intermediate SSA name's
   distance from the counter in SI->offsets, so later calls can stop early
   at any already-seen name.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the memoization array; -1 marks "not yet computed".  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* Pass 1: walk the def chain from RHS back towards COUNTER, summing
     the constant increments into RET.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Hit a memoized SSA name: derive the answer from its cached
	 offset relative to the current counter value.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copy or cast of another SSA name: look through it.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = rhs1 + CST: accumulate the constant.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[ssa + CST]: same thing spelled as an address.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* End of the chain: this load must be a read of COUNTER itself,
	 otherwise the bump is unknown.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* Compare base object and FIELD_DECL, not the trees pointer-wise,
	     since the COMPONENT_REFs are distinct tree nodes.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Pass 2: re-walk the same chain and memoize each SSA name's offset
     (VAL = bump still remaining at that point, clamped to MAX_SIZE).  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Already memoized further down the chain: done.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      stmt = SSA_NAME_DEF_STMT (lhs);

      /* The shapes below mirror pass 1; pass 1 already validated them.  */
      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
285
286
287 /* Called by walk_tree to look for references to va_list variables. */
288
289 static tree
290 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
291 void *data)
292 {
293 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
294 tree var = *tp;
295
296 if (TREE_CODE (var) == SSA_NAME)
297 {
298 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
299 return var;
300 }
301 else if (TREE_CODE (var) == VAR_DECL)
302 {
303 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
304 return var;
305 }
306
307 return NULL_TREE;
308 }
309

/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide (once per basic block; -1 means undecided) whether this
     block runs at most once per va_start, i.e. whether accumulating exact
     sizes is meaningful here.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* A counter write whose bump is a known nonzero constant: account for
     exactly that many units.  The "+ 1 > 1" test filters out both a zero
     bump and the HOST_WIDE_INT_M1U "unknown" sentinel (which wraps).  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: any write we can't account for, or any
     access in a block that may repeat, forces saving everything.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
362

/* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
   If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
   is false, AP has been seen in VAR = AP assignment.
   Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
   va_arg operation that doesn't cause the va_list variable to escape
   current function.  */

static bool
va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
			   bool write_p)
{
  tree base;

  /* AP must be a field access into some structure.  */
  if (TREE_CODE (ap) != COMPONENT_REF
      || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
    return false;

  /* VAR must be an SSA temporary that is not itself a tracked va_list.  */
  if (TREE_CODE (var) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
    return false;

  /* The structure must be one of the tracked local va_list variables
     (VAR_DECLs are keyed at DECL_UID + num_ssa_names in the bitmap).  */
  base = get_base_address (ap);
  if (TREE_CODE (base) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
    return false;

  /* Only the backend-designated counter fields contribute to the
     gpr/fpr sizes; other fields are recognized but ignored.  */
  if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
    va_list_counter_op (si, ap, var, true, write_p);
  else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
    va_list_counter_op (si, ap, var, false, write_p);

  return true;
}
397

/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  Used only for simple
   (void */char *) va_list types, where AP itself is the counter.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked local va_list VAR_DECLs.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an SSA temporary not already tracked as a va_list.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The read must be at a computable offset from the va_list pointer.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
442
443
444 /* Check for:
445 tem1 = AP;
446 TEM2 = tem1 + CST;
447 AP = TEM2;
448 sequence and update cfun->va_list_gpr_size. Return true if found. */
449
450 static bool
451 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
452 {
453 unsigned HOST_WIDE_INT increment;
454
455 if (TREE_CODE (ap) != VAR_DECL
456 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
457 return false;
458
459 if (TREE_CODE (tem2) != SSA_NAME
460 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
461 return false;
462
463 if (si->compute_sizes <= 0)
464 return false;
465
466 increment = va_list_counter_bump (si, ap, tem2, true);
467 if (increment + 1 <= 1)
468 return false;
469
470 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
471 cfun->va_list_gpr_size += increment;
472 else
473 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
474
475 return true;
476 }
477

/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  /* Only pointer-valued RHS can carry a va_list value.  */
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must be (derived from) a tracked escape temporary; otherwise the
     statement is of no interest here.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM[ptr + CST] form: the underlying pointer is what matters.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing the value anywhere but into another SSA temporary (memory,
     a global, ...) lets the va_list escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide (once per basic block; -1 means undecided) whether
     this block runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS must sit at a computable offset from the va_start pointer,
     otherwise we can't reason about later dereferences of it.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
544

/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  A use is harmless only if
   it matches one of the shapes a gimplified va_arg sequence produces;
   anything else is reported (under -fdump...-details) and treated as an
   escape.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes: any PHI mixing an escape temporary into a result that
	 is not itself an escape temporary counts as an escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      /* Ordinary statements.  */
      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug stmts never cause escapes.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  — a load through the tracked pointer:
		     account how far into the save area it reads and keep
		     going (not an escape).  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      /* End offset of the access = memoized base offset
			 + MEM_REF offset + access size.  */
		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary: harmless.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Store back into a tracked va_list variable:
			 harmless too.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* lhs = &MEM[ap_temp + CST]: harmless if LHS is a
			 tracked temporary as well.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of the temporary is an escape.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
680
681
682 namespace {
683
/* Pass metadata for the stdarg optimization pass.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
696
/* GIMPLE pass computing cfun->va_list_{g,f}pr_size for stdarg
   functions; see the file comment above for the overall strategy.  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* This optimization is only for stdarg functions.  */
      return fun->stdarg != 0;
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
714
715 unsigned int
716 pass_stdarg::execute (function *fun)
717 {
718 basic_block bb;
719 bool va_list_escapes = false;
720 bool va_list_simple_ptr;
721 struct stdarg_info si;
722 struct walk_stmt_info wi;
723 const char *funcname = NULL;
724 tree cfun_va_list;
725
726 fun->va_list_gpr_size = 0;
727 fun->va_list_fpr_size = 0;
728 memset (&si, 0, sizeof (si));
729 si.va_list_vars = BITMAP_ALLOC (NULL);
730 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
731
732 if (dump_file)
733 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
734
735 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
736 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
737 && (TREE_TYPE (cfun_va_list) == void_type_node
738 || TREE_TYPE (cfun_va_list) == char_type_node);
739 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
740
741 FOR_EACH_BB_FN (bb, fun)
742 {
743 gimple_stmt_iterator i;
744
745 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
746 {
747 gimple stmt = gsi_stmt (i);
748 tree callee, ap;
749
750 if (!is_gimple_call (stmt))
751 continue;
752
753 callee = gimple_call_fndecl (stmt);
754 if (!callee
755 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
756 continue;
757
758 switch (DECL_FUNCTION_CODE (callee))
759 {
760 case BUILT_IN_VA_START:
761 break;
762 /* If old style builtins are used, don't optimize anything. */
763 case BUILT_IN_SAVEREGS:
764 case BUILT_IN_NEXT_ARG:
765 va_list_escapes = true;
766 continue;
767 default:
768 continue;
769 }
770
771 si.va_start_count++;
772 ap = gimple_call_arg (stmt, 0);
773
774 if (TREE_CODE (ap) != ADDR_EXPR)
775 {
776 va_list_escapes = true;
777 break;
778 }
779 ap = TREE_OPERAND (ap, 0);
780 if (TREE_CODE (ap) == ARRAY_REF)
781 {
782 if (! integer_zerop (TREE_OPERAND (ap, 1)))
783 {
784 va_list_escapes = true;
785 break;
786 }
787 ap = TREE_OPERAND (ap, 0);
788 }
789 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
790 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
791 || TREE_CODE (ap) != VAR_DECL)
792 {
793 va_list_escapes = true;
794 break;
795 }
796
797 if (is_global_var (ap))
798 {
799 va_list_escapes = true;
800 break;
801 }
802
803 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
804
805 /* VA_START_BB and VA_START_AP will be only used if there is just
806 one va_start in the function. */
807 si.va_start_bb = bb;
808 si.va_start_ap = ap;
809 }
810
811 if (va_list_escapes)
812 break;
813 }
814
815 /* If there were no va_start uses in the function, there is no need to
816 save anything. */
817 if (si.va_start_count == 0)
818 goto finish;
819
820 /* If some va_list arguments weren't local, we can't optimize. */
821 if (va_list_escapes)
822 goto finish;
823
824 /* For void * or char * va_list, something useful can be done only
825 if there is just one va_start. */
826 if (va_list_simple_ptr && si.va_start_count > 1)
827 {
828 va_list_escapes = true;
829 goto finish;
830 }
831
832 /* For struct * va_list, if the backend didn't tell us what the counter fields
833 are, there is nothing more we can do. */
834 if (!va_list_simple_ptr
835 && va_list_gpr_counter_field == NULL_TREE
836 && va_list_fpr_counter_field == NULL_TREE)
837 {
838 va_list_escapes = true;
839 goto finish;
840 }
841
842 /* For void * or char * va_list there is just one counter
843 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
844 if (va_list_simple_ptr)
845 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
846
847 calculate_dominance_info (CDI_DOMINATORS);
848 memset (&wi, 0, sizeof (wi));
849 wi.info = si.va_list_vars;
850
851 FOR_EACH_BB_FN (bb, fun)
852 {
853 si.compute_sizes = -1;
854 si.bb = bb;
855
856 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
857 them as assignments for the purpose of escape analysis. This is
858 not needed for non-simple va_list because virtual phis don't perform
859 any real data movement. */
860 if (va_list_simple_ptr)
861 {
862 tree lhs, rhs;
863 use_operand_p uop;
864 ssa_op_iter soi;
865
866 for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
867 gsi_next (&i))
868 {
869 gphi *phi = i.phi ();
870 lhs = PHI_RESULT (phi);
871
872 if (virtual_operand_p (lhs))
873 continue;
874
875 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
876 {
877 rhs = USE_FROM_PTR (uop);
878 if (va_list_ptr_read (&si, rhs, lhs))
879 continue;
880 else if (va_list_ptr_write (&si, lhs, rhs))
881 continue;
882 else
883 check_va_list_escapes (&si, lhs, rhs);
884
885 if (si.va_list_escapes)
886 {
887 if (dump_file && (dump_flags & TDF_DETAILS))
888 {
889 fputs ("va_list escapes in ", dump_file);
890 print_gimple_stmt (dump_file, phi, 0, dump_flags);
891 fputc ('\n', dump_file);
892 }
893 va_list_escapes = true;
894 }
895 }
896 }
897 }
898
899 for (gimple_stmt_iterator i = gsi_start_bb (bb);
900 !gsi_end_p (i) && !va_list_escapes;
901 gsi_next (&i))
902 {
903 gimple stmt = gsi_stmt (i);
904
905 /* Don't look at __builtin_va_{start,end}, they are ok. */
906 if (is_gimple_call (stmt))
907 {
908 tree callee = gimple_call_fndecl (stmt);
909
910 if (callee
911 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
912 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
913 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
914 continue;
915 }
916
917 if (is_gimple_assign (stmt))
918 {
919 tree lhs = gimple_assign_lhs (stmt);
920 tree rhs = gimple_assign_rhs1 (stmt);
921
922 if (va_list_simple_ptr)
923 {
924 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
925 == GIMPLE_SINGLE_RHS)
926 {
927 /* Check for ap ={v} {}. */
928 if (TREE_CLOBBER_P (rhs))
929 continue;
930
931 /* Check for tem = ap. */
932 else if (va_list_ptr_read (&si, rhs, lhs))
933 continue;
934
935 /* Check for the last insn in:
936 tem1 = ap;
937 tem2 = tem1 + CST;
938 ap = tem2;
939 sequence. */
940 else if (va_list_ptr_write (&si, lhs, rhs))
941 continue;
942 }
943
944 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
945 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
946 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
947 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
948 == GIMPLE_SINGLE_RHS))
949 check_va_list_escapes (&si, lhs, rhs);
950 }
951 else
952 {
953 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
954 == GIMPLE_SINGLE_RHS)
955 {
956 /* Check for ap ={v} {}. */
957 if (TREE_CLOBBER_P (rhs))
958 continue;
959
960 /* Check for ap[0].field = temp. */
961 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
962 continue;
963
964 /* Check for temp = ap[0].field. */
965 else if (va_list_counter_struct_op (&si, rhs, lhs,
966 false))
967 continue;
968 }
969
970 /* Do any architecture specific checking. */
971 if (targetm.stdarg_optimize_hook
972 && targetm.stdarg_optimize_hook (&si, stmt))
973 continue;
974 }
975 }
976 else if (is_gimple_debug (stmt))
977 continue;
978
979 /* All other uses of va_list are either va_copy (that is not handled
980 in this optimization), taking address of va_list variable or
981 passing va_list to other functions (in that case va_list might
982 escape the function and therefore va_start needs to set it up
983 fully), or some unexpected use of va_list. None of these should
984 happen in a gimplified VA_ARG_EXPR. */
985 if (si.va_list_escapes
986 || walk_gimple_op (stmt, find_va_list_reference, &wi))
987 {
988 if (dump_file && (dump_flags & TDF_DETAILS))
989 {
990 fputs ("va_list escapes in ", dump_file);
991 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
992 fputc ('\n', dump_file);
993 }
994 va_list_escapes = true;
995 }
996 }
997
998 if (va_list_escapes)
999 break;
1000 }
1001
1002 if (! va_list_escapes
1003 && va_list_simple_ptr
1004 && ! bitmap_empty_p (si.va_list_escape_vars)
1005 && check_all_va_list_escapes (&si))
1006 va_list_escapes = true;
1007
1008 finish:
1009 if (va_list_escapes)
1010 {
1011 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1012 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1013 }
1014 BITMAP_FREE (si.va_list_vars);
1015 BITMAP_FREE (si.va_list_escape_vars);
1016 free (si.offsets);
1017 if (dump_file)
1018 {
1019 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1020 funcname, (int) va_list_escapes);
1021 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1022 fputs ("all", dump_file);
1023 else
1024 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1025 fputs (" GPR units and ", dump_file);
1026 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1027 fputs ("all", dump_file);
1028 else
1029 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1030 fputs (" FPR units.\n", dump_file);
1031 }
1032 return 0;
1033 }
1034
1035 } // anon namespace
1036
/* Factory function called by the pass manager to create an instance of
   the stdarg pass.  Caller owns the returned object.  */

gimple_opt_pass *
make_pass_stdarg (gcc::context *ctxt)
{
  return new pass_stdarg (ctxt);
}