/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
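
/* As an illustrative sketch (identifier and label names are invented
   here, not actual dump output), a High GIMPLE body such as

     {
       int y;
       y = x;
       if (y != 0) goto <D.1>; else goto <D.2>;
       <D.1>:
       return 1;
       <D.2>:
       return 1;
     }

   would be lowered into a flat sequence in which the GIMPLE_BIND is
   gone (its variables having been recorded in the function), both
   returns jump to a single return site, and the representative
   return is emitted at the end:

     y = x;
     if (y != 0) goto <D.1>; else goto <D.2>;
     <D.1>:
     goto <retlab>;
     <D.2>:
     goto <retlab>;
     <retlab>:
     return 1;

   See lower_gimple_bind and lower_gimple_return below for the two
   transformations.  */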

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                             data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",                              /* name */
  NULL,                                 /* gate */
  lower_function_body,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  PROP_gimple_lcf,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


/* Verify that the types of the actual arguments of call statement STMT
   match those expected by the called function.  If we cannot verify
   this, or if there is a mismatch, return false.  */

bool
gimple_check_call_args (gimple stmt)
{
  tree fndecl, parms, p;
  unsigned int i, nargs;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* If there is a declaration with argument decls, check against those;
     otherwise fall back to the declared parameter types.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          /* We cannot distinguish a varargs function from the case
             of excess arguments; still, deferring the inlining decision
             to the callee is possible.  */
          if (!p)
            break;
          if (p == error_mark_node
              || gimple_call_arg (stmt, i) == error_mark_node
              || !fold_convertible_p (DECL_ARG_TYPE (p),
                                      gimple_call_arg (stmt, i)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          /* If this is a varargs function, defer the inlining decision
             to the callee.  */
          if (!p)
            break;
          if (TREE_VALUE (p) == error_mark_node
              || gimple_call_arg (stmt, i) == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || !fold_convertible_p (TREE_VALUE (p),
                                      gimple_call_arg (stmt, i)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}
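
/* As an illustrative sketch (not from the sources): the check above
   rejects calls whose arguments cannot be converted to the declared
   parameter types, e.g.

     struct S { int a[4]; } s;
     extern void f (int);
     void g (void) { ((void (*) (struct S)) f) (s); }

   Here the actual argument has aggregate type while f expects an int,
   so fold_convertible_p fails and gimple_check_call_args returns
   false; callers can use this, for instance, to flag calls with
   mismatched arguments as not inlinable.  */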


/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
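
/* As an illustrative sketch (not actual dump output): a parallel
   directive

     GIMPLE_OMP_PARALLEL
       body: a = 1;
             b = 2;

   is rewritten by lower_omp_directive into

     GIMPLE_OMP_PARALLEL      <- body field now NULL
     a = 1;
     b = 2;

   i.e. the directive statement itself stays in place, its (lowered)
   body is spliced immediately after it in the enclosing sequence, and
   the body field is cleared.  */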


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate.  Simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      {
        bool try_cannot_fallthru;
        lower_sequence (gimple_try_eval (stmt), data);
        try_cannot_fallthru = data->cannot_fallthru;
        data->cannot_fallthru = false;
        lower_sequence (gimple_try_cleanup (stmt), data);
        /* See gimple_stmt_may_fallthru for the rationale.  */
        if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          {
            data->cannot_fallthru |= try_cannot_fallthru;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
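
/* As an illustrative sketch of the fallthru tracking above (label
   names invented here): in a sequence such as

     goto <D.1>;
     return;          <- unreachable, removed by lower_stmt
     <D.1>:
     return;          <- reachable, lowered by lower_gimple_return

   the first return follows a statement that cannot fall through, so
   it is simply deleted instead of being turned into a goto to the
   unique return site; the label then resets cannot_fallthru, so the
   second return is lowered normally.  */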

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* The block tree may get clobbered by inlining.  Normally this
             would be fixed in rest_of_decl_compilation using block notes,
             but since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY with kind GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence cannot fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
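
/* As an illustrative sketch (in C terms, label names invented here):
   two returns of the same value

     if (x) return 0;
     ...
     return 0;

   are lowered so that each return becomes "goto <retlab>", and a
   single representative

     <retlab>:
     return 0;

   is emitted at the end of the function by lower_function_body.  The
   representative's location is cleared above so that coverage
   analysis does not attribute every merged return to one source
   line.  */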

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
        add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}