[ARM] Add ACLE 2.0 predefined macro __ARM_FEATURE_IDIV
[gcc.git] / gcc / tree-stdarg.c
1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "vec.h"
29 #include "machmode.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "langhooks.h"
34 #include "gimple-pretty-print.h"
35 #include "target.h"
36 #include "bitmap.h"
37 #include "basic-block.h"
38 #include "tree-ssa-alias.h"
39 #include "internal-fn.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "gimple-iterator.h"
44 #include "gimple-walk.h"
45 #include "gimple-ssa.h"
46 #include "tree-phinodes.h"
47 #include "ssa-iterators.h"
48 #include "stringpool.h"
49 #include "tree-ssanames.h"
50 #include "sbitmap.h"
51 #include "tree-pass.h"
52 #include "tree-stdarg.h"
53
54 /* A simple pass that attempts to optimize stdarg functions on architectures
55 that need to save register arguments to stack on entry to stdarg functions.
56 If the function doesn't use any va_start macros, no registers need to
57 be saved. If va_start macros are used, the va_list variables don't escape
58 the function, it is only necessary to save registers that will be used
59 in va_arg macros. E.g. if va_arg is only used with integral types
60 in the function, floating point registers don't need to be saved, etc. */
61
62
/* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
   is executed at most as many times as VA_START_BB.  */

static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
  vec<edge> stack = vNULL;
  edge e;
  edge_iterator ei;
  sbitmap visited;
  bool ret;

  if (va_arg_bb == va_start_bb)
    return true;

  /* If VA_ARG_BB is not dominated by VA_START_BB it can be reached
     without executing va_start at all.  */
  if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
    return false;

  /* Walk predecessor edges backwards from VA_ARG_BB.  Because of the
     dominance check above, every backward walk must eventually reach
     VA_START_BB; the question is whether any path can revisit
     VA_ARG_BB (a loop) or cross an abnormal edge first.  */
  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (visited);
  ret = true;

  FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
    stack.safe_push (e);

  while (! stack.is_empty ())
    {
      basic_block src;

      e = stack.pop ();
      src = e->src;

      /* Abnormal/EH edges make the execution count unpredictable.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ret = false;
	  break;
	}

      /* This path reached the va_start block without anomalies.  */
      if (src == va_start_bb)
	continue;

      /* va_arg_bb can be executed more times than va_start_bb.  */
      if (src == va_arg_bb)
	{
	  ret = false;
	  break;
	}

      /* Dominance guarantees we cannot walk past the entry block
	 without meeting VA_START_BB first.  */
      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));

      if (! bitmap_bit_p (visited, src->index))
	{
	  bitmap_set_bit (visited, src->index);
	  FOR_EACH_EDGE (e, ei, src->preds)
	    stack.safe_push (e);
	}
    }

  stack.release ();
  sbitmap_free (visited);
  return ret;
}
125
126
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks entries
     that have not been computed yet.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;
  /* First pass: walk the SSA def chain backwards from RHS, accumulating
     constant increments into RET, until we either read COUNTER itself,
     reach an SSA name with a cached offset, or find something we cannot
     analyze (in which case HOST_WIDE_INT_M1U is returned).  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  /* Cached offset available for this name; combine it with the
	     increments accumulated so far.  Once the counter is known to
	     be saturated, just report MAX_SIZE.  */
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Look through plain copies and casts.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = name + CST: accumulate the constant bump.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM[name + CST] is also a constant bump of NAME.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must terminate in a read of COUNTER itself.  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  /* For field accesses compare base address and FIELD_DECL
	     rather than requiring pointer-identical trees.  */
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain and record in SI->offsets the
     running counter value each SSA name corresponds to, capped at
     MAX_SIZE, so later calls can stop at a cached name.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      /* The first pass already validated this chain, so no
	 is_gimple_assign re-check is needed here.  */
      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
276
277
278 /* Called by walk_tree to look for references to va_list variables. */
279
280 static tree
281 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
282 void *data)
283 {
284 bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
285 tree var = *tp;
286
287 if (TREE_CODE (var) == SSA_NAME)
288 {
289 if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
290 return var;
291 }
292 else if (TREE_CODE (var) == VAR_DECL)
293 {
294 if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
295 return var;
296 }
297
298 return NULL_TREE;
299 }
300
301
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* Lazily decide, once per basic block, whether counter sizes can be
     computed: only when there is a single va_start and this block
     executes at most once per execution of it.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For a counter write AP = AP + CST we can account the increment
     precisely.  The unsigned `+ 1 > 1' test rejects both an increment
     of 0 and HOST_WIDE_INT_M1U (the "unknown" marker) at once.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Otherwise be conservative: an unanalyzable write, or any access in a
     block that may repeat, forces saving everything.  Plain reads when
     sizes are computable are harmless and change nothing.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
353
354
355 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
356 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
357 is false, AP has been seen in VAR = AP assignment.
358 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
359 va_arg operation that doesn't cause the va_list variable to escape
360 current function. */
361
362 static bool
363 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
364 bool write_p)
365 {
366 tree base;
367
368 if (TREE_CODE (ap) != COMPONENT_REF
369 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
370 return false;
371
372 if (TREE_CODE (var) != SSA_NAME
373 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
374 return false;
375
376 base = get_base_address (ap);
377 if (TREE_CODE (base) != VAR_DECL
378 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
379 return false;
380
381 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
382 va_list_counter_op (si, ap, var, true, write_p);
383 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
384 va_list_counter_op (si, ap, var, false, write_p);
385
386 return true;
387 }
388
389
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list variables.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be a plain SSA temporary, not a tracked va_list name.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide, once per basic block, whether counter sizes can be
     computed: only when there is a single va_start and this block
     executes at most once per execution of it.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* The read must be analyzable as va_list + known constant.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));

  return true;
}
433
434
435 /* Check for:
436 tem1 = AP;
437 TEM2 = tem1 + CST;
438 AP = TEM2;
439 sequence and update cfun->va_list_gpr_size. Return true if found. */
440
441 static bool
442 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
443 {
444 unsigned HOST_WIDE_INT increment;
445
446 if (TREE_CODE (ap) != VAR_DECL
447 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
448 return false;
449
450 if (TREE_CODE (tem2) != SSA_NAME
451 || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
452 return false;
453
454 if (si->compute_sizes <= 0)
455 return false;
456
457 increment = va_list_counter_bump (si, ap, tem2, true);
458 if (increment + 1 <= 1)
459 return false;
460
461 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
462 cfun->va_list_gpr_size += increment;
463 else
464 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
465
466 return true;
467 }
468
469
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* Only RHSes derived from a tracked va_list temporary are
     interesting here.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      /* &MEM[ptr + CST] is treated like ptr + CST.  */
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Storing the va_list value anywhere but a local SSA temporary is
     an escape.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide, once per basic block, whether counter sizes can be
     computed: only when there is a single va_start and this block
     executes at most once per execution of it.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* The new value must still be va_start's ap plus a known constant,
     otherwise we lose track and must assume an escape.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another temporary holding a va_list-derived value;
     track it too.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
535
536
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      /* A PHI whose result is not itself tracked but which has a tracked
	 argument mixes the va_list value into an untracked name - treat
	 that as an escape.  */
      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gimple phi = gsi_stmt (i);

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      /* Scan every real statement that uses a tracked temporary and
	 whitelist the forms a gimplified va_arg may take; anything else
	 is an escape.  */
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements never constitute a real use.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp; */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      /* Loading through the temporary: the end of the
			 access (cached offset + MEM_REF offset + access
			 size) bounds how many GPR units are consumed.  */
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary is fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Writing back into a tracked va_list variable
			 (ap = ap_temp) is fine too.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      /* &MEM[ap_temp + CST] into a tracked temporary is
			 just another constant bump.  */
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      /* Any other use of a tracked temporary escapes.  */
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
671
672
673 namespace {
674
/* Pass registration metadata for the stdarg optimization; requires a
   CFG and SSA form, provides and destroys nothing.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
687
/* GIMPLE pass wrapper around the stdarg analysis; see the file comment
   for what the pass computes.  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
  {
    /* This optimization is only for stdarg functions.  */
    return fun->stdarg != 0;
  }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
705
706 unsigned int
707 pass_stdarg::execute (function *fun)
708 {
709 basic_block bb;
710 bool va_list_escapes = false;
711 bool va_list_simple_ptr;
712 struct stdarg_info si;
713 struct walk_stmt_info wi;
714 const char *funcname = NULL;
715 tree cfun_va_list;
716
717 fun->va_list_gpr_size = 0;
718 fun->va_list_fpr_size = 0;
719 memset (&si, 0, sizeof (si));
720 si.va_list_vars = BITMAP_ALLOC (NULL);
721 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
722
723 if (dump_file)
724 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
725
726 cfun_va_list = targetm.fn_abi_va_list (fun->decl);
727 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
728 && (TREE_TYPE (cfun_va_list) == void_type_node
729 || TREE_TYPE (cfun_va_list) == char_type_node);
730 gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);
731
732 FOR_EACH_BB_FN (bb, fun)
733 {
734 gimple_stmt_iterator i;
735
736 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
737 {
738 gimple stmt = gsi_stmt (i);
739 tree callee, ap;
740
741 if (!is_gimple_call (stmt))
742 continue;
743
744 callee = gimple_call_fndecl (stmt);
745 if (!callee
746 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
747 continue;
748
749 switch (DECL_FUNCTION_CODE (callee))
750 {
751 case BUILT_IN_VA_START:
752 break;
753 /* If old style builtins are used, don't optimize anything. */
754 case BUILT_IN_SAVEREGS:
755 case BUILT_IN_NEXT_ARG:
756 va_list_escapes = true;
757 continue;
758 default:
759 continue;
760 }
761
762 si.va_start_count++;
763 ap = gimple_call_arg (stmt, 0);
764
765 if (TREE_CODE (ap) != ADDR_EXPR)
766 {
767 va_list_escapes = true;
768 break;
769 }
770 ap = TREE_OPERAND (ap, 0);
771 if (TREE_CODE (ap) == ARRAY_REF)
772 {
773 if (! integer_zerop (TREE_OPERAND (ap, 1)))
774 {
775 va_list_escapes = true;
776 break;
777 }
778 ap = TREE_OPERAND (ap, 0);
779 }
780 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
781 != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
782 || TREE_CODE (ap) != VAR_DECL)
783 {
784 va_list_escapes = true;
785 break;
786 }
787
788 if (is_global_var (ap))
789 {
790 va_list_escapes = true;
791 break;
792 }
793
794 bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
795
796 /* VA_START_BB and VA_START_AP will be only used if there is just
797 one va_start in the function. */
798 si.va_start_bb = bb;
799 si.va_start_ap = ap;
800 }
801
802 if (va_list_escapes)
803 break;
804 }
805
806 /* If there were no va_start uses in the function, there is no need to
807 save anything. */
808 if (si.va_start_count == 0)
809 goto finish;
810
811 /* If some va_list arguments weren't local, we can't optimize. */
812 if (va_list_escapes)
813 goto finish;
814
815 /* For void * or char * va_list, something useful can be done only
816 if there is just one va_start. */
817 if (va_list_simple_ptr && si.va_start_count > 1)
818 {
819 va_list_escapes = true;
820 goto finish;
821 }
822
823 /* For struct * va_list, if the backend didn't tell us what the counter fields
824 are, there is nothing more we can do. */
825 if (!va_list_simple_ptr
826 && va_list_gpr_counter_field == NULL_TREE
827 && va_list_fpr_counter_field == NULL_TREE)
828 {
829 va_list_escapes = true;
830 goto finish;
831 }
832
833 /* For void * or char * va_list there is just one counter
834 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
835 if (va_list_simple_ptr)
836 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
837
838 calculate_dominance_info (CDI_DOMINATORS);
839 memset (&wi, 0, sizeof (wi));
840 wi.info = si.va_list_vars;
841
842 FOR_EACH_BB_FN (bb, fun)
843 {
844 gimple_stmt_iterator i;
845
846 si.compute_sizes = -1;
847 si.bb = bb;
848
849 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
850 them as assignments for the purpose of escape analysis. This is
851 not needed for non-simple va_list because virtual phis don't perform
852 any real data movement. */
853 if (va_list_simple_ptr)
854 {
855 tree lhs, rhs;
856 use_operand_p uop;
857 ssa_op_iter soi;
858
859 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
860 {
861 gimple phi = gsi_stmt (i);
862 lhs = PHI_RESULT (phi);
863
864 if (virtual_operand_p (lhs))
865 continue;
866
867 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
868 {
869 rhs = USE_FROM_PTR (uop);
870 if (va_list_ptr_read (&si, rhs, lhs))
871 continue;
872 else if (va_list_ptr_write (&si, lhs, rhs))
873 continue;
874 else
875 check_va_list_escapes (&si, lhs, rhs);
876
877 if (si.va_list_escapes)
878 {
879 if (dump_file && (dump_flags & TDF_DETAILS))
880 {
881 fputs ("va_list escapes in ", dump_file);
882 print_gimple_stmt (dump_file, phi, 0, dump_flags);
883 fputc ('\n', dump_file);
884 }
885 va_list_escapes = true;
886 }
887 }
888 }
889 }
890
891 for (i = gsi_start_bb (bb);
892 !gsi_end_p (i) && !va_list_escapes;
893 gsi_next (&i))
894 {
895 gimple stmt = gsi_stmt (i);
896
897 /* Don't look at __builtin_va_{start,end}, they are ok. */
898 if (is_gimple_call (stmt))
899 {
900 tree callee = gimple_call_fndecl (stmt);
901
902 if (callee
903 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
904 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
905 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
906 continue;
907 }
908
909 if (is_gimple_assign (stmt))
910 {
911 tree lhs = gimple_assign_lhs (stmt);
912 tree rhs = gimple_assign_rhs1 (stmt);
913
914 if (va_list_simple_ptr)
915 {
916 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
917 == GIMPLE_SINGLE_RHS)
918 {
919 /* Check for ap ={v} {}. */
920 if (TREE_CLOBBER_P (rhs))
921 continue;
922
923 /* Check for tem = ap. */
924 else if (va_list_ptr_read (&si, rhs, lhs))
925 continue;
926
927 /* Check for the last insn in:
928 tem1 = ap;
929 tem2 = tem1 + CST;
930 ap = tem2;
931 sequence. */
932 else if (va_list_ptr_write (&si, lhs, rhs))
933 continue;
934 }
935
936 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
937 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
938 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
939 || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
940 == GIMPLE_SINGLE_RHS))
941 check_va_list_escapes (&si, lhs, rhs);
942 }
943 else
944 {
945 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
946 == GIMPLE_SINGLE_RHS)
947 {
948 /* Check for ap ={v} {}. */
949 if (TREE_CLOBBER_P (rhs))
950 continue;
951
952 /* Check for ap[0].field = temp. */
953 else if (va_list_counter_struct_op (&si, lhs, rhs, true))
954 continue;
955
956 /* Check for temp = ap[0].field. */
957 else if (va_list_counter_struct_op (&si, rhs, lhs,
958 false))
959 continue;
960 }
961
962 /* Do any architecture specific checking. */
963 if (targetm.stdarg_optimize_hook
964 && targetm.stdarg_optimize_hook (&si, stmt))
965 continue;
966 }
967 }
968 else if (is_gimple_debug (stmt))
969 continue;
970
971 /* All other uses of va_list are either va_copy (that is not handled
972 in this optimization), taking address of va_list variable or
973 passing va_list to other functions (in that case va_list might
974 escape the function and therefore va_start needs to set it up
975 fully), or some unexpected use of va_list. None of these should
976 happen in a gimplified VA_ARG_EXPR. */
977 if (si.va_list_escapes
978 || walk_gimple_op (stmt, find_va_list_reference, &wi))
979 {
980 if (dump_file && (dump_flags & TDF_DETAILS))
981 {
982 fputs ("va_list escapes in ", dump_file);
983 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
984 fputc ('\n', dump_file);
985 }
986 va_list_escapes = true;
987 }
988 }
989
990 if (va_list_escapes)
991 break;
992 }
993
994 if (! va_list_escapes
995 && va_list_simple_ptr
996 && ! bitmap_empty_p (si.va_list_escape_vars)
997 && check_all_va_list_escapes (&si))
998 va_list_escapes = true;
999
1000 finish:
1001 if (va_list_escapes)
1002 {
1003 fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
1004 fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
1005 }
1006 BITMAP_FREE (si.va_list_vars);
1007 BITMAP_FREE (si.va_list_escape_vars);
1008 free (si.offsets);
1009 if (dump_file)
1010 {
1011 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
1012 funcname, (int) va_list_escapes);
1013 if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
1014 fputs ("all", dump_file);
1015 else
1016 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
1017 fputs (" GPR units and ", dump_file);
1018 if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
1019 fputs ("all", dump_file);
1020 else
1021 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
1022 fputs (" FPR units.\n", dump_file);
1023 }
1024 return 0;
1025 }
1026
1027 } // anon namespace
1028
1029 gimple_opt_pass *
1030 make_pass_stdarg (gcc::context *ctxt)
1031 {
1032 return new pass_stdarg (ctxt);
1033 }