/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "tree-iterator.h"
29 #include "tree-inline.h"
30 #include "tree-flow.h"
33 #include "diagnostic-core.h"
34 #include "tree-pass.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
48 /* Match a return statement with a label. During lowering, we identify
49 identical return statements and replace duplicates with a jump to
50 the corresponding label. */
51 struct return_statements_t
56 typedef struct return_statements_t return_statements_t
;
58 DEF_VEC_O(return_statements_t
);
59 DEF_VEC_ALLOC_O(return_statements_t
,heap
);
63 /* Block the current statement belongs to. */
66 /* A vector of label and return statements to be moved to the end
68 VEC(return_statements_t
,heap
) *return_statements
;
70 /* True if the current statement cannot fall through. */
73 /* True if the function calls __builtin_setjmp. */
74 bool calls_builtin_setjmp
;
77 static void lower_stmt (gimple_stmt_iterator
*, struct lower_data
*);
78 static void lower_gimple_bind (gimple_stmt_iterator
*, struct lower_data
*);
79 static void lower_gimple_return (gimple_stmt_iterator
*, struct lower_data
*);
80 static void lower_builtin_setjmp (gimple_stmt_iterator
*);
83 /* Lower the body of current_function_decl from High GIMPLE into Low
87 lower_function_body (void)
89 struct lower_data data
;
90 gimple_seq body
= gimple_body (current_function_decl
);
91 gimple_seq lowered_body
;
92 gimple_stmt_iterator i
;
97 /* The gimplifier should've left a body of exactly one statement,
98 namely a GIMPLE_BIND. */
99 gcc_assert (gimple_seq_first (body
) == gimple_seq_last (body
)
100 && gimple_code (gimple_seq_first_stmt (body
)) == GIMPLE_BIND
);
102 memset (&data
, 0, sizeof (data
));
103 data
.block
= DECL_INITIAL (current_function_decl
);
104 BLOCK_SUBBLOCKS (data
.block
) = NULL_TREE
;
105 BLOCK_CHAIN (data
.block
) = NULL_TREE
;
106 TREE_ASM_WRITTEN (data
.block
) = 1;
107 data
.return_statements
= VEC_alloc (return_statements_t
, heap
, 8);
109 bind
= gimple_seq_first_stmt (body
);
111 gimple_seq_add_stmt (&lowered_body
, bind
);
112 i
= gsi_start (lowered_body
);
113 lower_gimple_bind (&i
, &data
);
115 /* Once the old body has been lowered, replace it with the new
117 gimple_set_body (current_function_decl
, lowered_body
);
119 i
= gsi_last (lowered_body
);
121 /* If the function falls off the end, we need a null return statement.
122 If we've already got one in the return_statements vector, we don't
123 need to do anything special. Otherwise build one by hand. */
124 if (gimple_seq_may_fallthru (lowered_body
)
125 && (VEC_empty (return_statements_t
, data
.return_statements
)
126 || gimple_return_retval (VEC_last (return_statements_t
,
127 data
.return_statements
)->stmt
) != NULL
))
129 x
= gimple_build_return (NULL
);
130 gimple_set_location (x
, cfun
->function_end_locus
);
131 gimple_set_block (x
, DECL_INITIAL (current_function_decl
));
132 gsi_insert_after (&i
, x
, GSI_CONTINUE_LINKING
);
135 /* If we lowered any return statements, emit the representative
136 at the end of the function. */
137 while (!VEC_empty (return_statements_t
, data
.return_statements
))
139 return_statements_t t
;
141 /* Unfortunately, we can't use VEC_pop because it returns void for
143 t
= *VEC_last (return_statements_t
, data
.return_statements
);
144 VEC_truncate (return_statements_t
,
145 data
.return_statements
,
146 VEC_length (return_statements_t
,
147 data
.return_statements
) - 1);
149 x
= gimple_build_label (t
.label
);
150 gsi_insert_after (&i
, x
, GSI_CONTINUE_LINKING
);
151 gsi_insert_after (&i
, t
.stmt
, GSI_CONTINUE_LINKING
);
154 /* If the function calls __builtin_setjmp, we need to emit the computed
155 goto that will serve as the unique dispatcher for all the receivers. */
156 if (data
.calls_builtin_setjmp
)
158 tree disp_label
, disp_var
, arg
;
160 /* Build 'DISP_LABEL:' and insert. */
161 disp_label
= create_artificial_label (cfun
->function_end_locus
);
162 /* This mark will create forward edges from every call site. */
163 DECL_NONLOCAL (disp_label
) = 1;
164 cfun
->has_nonlocal_label
= 1;
165 x
= gimple_build_label (disp_label
);
166 gsi_insert_after (&i
, x
, GSI_CONTINUE_LINKING
);
168 /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
170 disp_var
= create_tmp_var (ptr_type_node
, "setjmpvar");
171 arg
= build_addr (disp_label
, current_function_decl
);
172 t
= implicit_built_in_decls
[BUILT_IN_SETJMP_DISPATCHER
];
173 x
= gimple_build_call (t
, 1, arg
);
174 gimple_call_set_lhs (x
, disp_var
);
176 /* Build 'goto DISP_VAR;' and insert. */
177 gsi_insert_after (&i
, x
, GSI_CONTINUE_LINKING
);
178 x
= gimple_build_goto (disp_var
);
179 gsi_insert_after (&i
, x
, GSI_CONTINUE_LINKING
);
182 gcc_assert (data
.block
== DECL_INITIAL (current_function_decl
));
183 BLOCK_SUBBLOCKS (data
.block
)
184 = blocks_nreverse (BLOCK_SUBBLOCKS (data
.block
));
186 clear_block_marks (data
.block
);
187 VEC_free(return_statements_t
, heap
, data
.return_statements
);
191 struct gimple_opt_pass pass_lower_cf
=
197 lower_function_body
, /* execute */
200 0, /* static_pass_number */
202 PROP_gimple_any
, /* properties_required */
203 PROP_gimple_lcf
, /* properties_provided */
204 0, /* properties_destroyed */
205 0, /* todo_flags_start */
206 TODO_dump_func
/* todo_flags_finish */
211 /* Verify if the type of the argument matches that of the function
212 declaration. If we cannot verify this or there is a mismatch,
216 gimple_check_call_args (gimple stmt
)
218 tree fndecl
, parms
, p
;
219 unsigned int i
, nargs
;
221 nargs
= gimple_call_num_args (stmt
);
223 /* Get argument types for verification. */
224 fndecl
= gimple_call_fndecl (stmt
);
226 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
228 parms
= TYPE_ARG_TYPES (gimple_call_fntype (stmt
));
230 /* Verify if the type of the argument matches that of the function
231 declaration. If we cannot verify this or there is a mismatch,
233 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
235 for (i
= 0, p
= DECL_ARGUMENTS (fndecl
);
237 i
++, p
= DECL_CHAIN (p
))
239 /* We cannot distinguish a varargs function from the case
240 of excess parameters, still deferring the inlining decision
241 to the callee is possible. */
244 if (p
== error_mark_node
245 || gimple_call_arg (stmt
, i
) == error_mark_node
246 || !fold_convertible_p (DECL_ARG_TYPE (p
),
247 gimple_call_arg (stmt
, i
)))
253 for (i
= 0, p
= parms
; i
< nargs
; i
++, p
= TREE_CHAIN (p
))
255 /* If this is a varargs function defer inlining decision
259 if (TREE_VALUE (p
) == error_mark_node
260 || gimple_call_arg (stmt
, i
) == error_mark_node
261 || TREE_CODE (TREE_VALUE (p
)) == VOID_TYPE
262 || !fold_convertible_p (TREE_VALUE (p
),
263 gimple_call_arg (stmt
, i
)))
276 /* Lower sequence SEQ. Unlike gimplification the statements are not relowered
277 when they are changed -- if this has to be done, the lowering routine must
278 do it explicitly. DATA is passed through the recursion. */
281 lower_sequence (gimple_seq seq
, struct lower_data
*data
)
283 gimple_stmt_iterator gsi
;
285 for (gsi
= gsi_start (seq
); !gsi_end_p (gsi
); )
286 lower_stmt (&gsi
, data
);
290 /* Lower the OpenMP directive statement pointed by GSI. DATA is
291 passed through the recursion. */
294 lower_omp_directive (gimple_stmt_iterator
*gsi
, struct lower_data
*data
)
298 stmt
= gsi_stmt (*gsi
);
300 lower_sequence (gimple_omp_body (stmt
), data
);
301 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
302 gsi_insert_seq_before (gsi
, gimple_omp_body (stmt
), GSI_SAME_STMT
);
303 gimple_omp_set_body (stmt
, NULL
);
304 gsi_remove (gsi
, false);
308 /* Lower statement GSI. DATA is passed through the recursion. We try to
309 track the fallthruness of statements and get rid of unreachable return
310 statements in order to prevent the EH lowering pass from adding useless
311 edges that can cause bogus warnings to be issued later; this guess need
312 not be 100% accurate, simply be conservative and reset cannot_fallthru
313 to false if we don't know. */
316 lower_stmt (gimple_stmt_iterator
*gsi
, struct lower_data
*data
)
318 gimple stmt
= gsi_stmt (*gsi
);
320 gimple_set_block (stmt
, data
->block
);
322 switch (gimple_code (stmt
))
325 lower_gimple_bind (gsi
, data
);
326 /* Propagate fallthruness. */
332 data
->cannot_fallthru
= true;
337 if (data
->cannot_fallthru
)
339 gsi_remove (gsi
, false);
340 /* Propagate fallthruness. */
344 lower_gimple_return (gsi
, data
);
345 data
->cannot_fallthru
= true;
351 bool try_cannot_fallthru
;
352 lower_sequence (gimple_try_eval (stmt
), data
);
353 try_cannot_fallthru
= data
->cannot_fallthru
;
354 data
->cannot_fallthru
= false;
355 lower_sequence (gimple_try_cleanup (stmt
), data
);
356 /* See gimple_stmt_may_fallthru for the rationale. */
357 if (gimple_try_kind (stmt
) == GIMPLE_TRY_FINALLY
)
359 data
->cannot_fallthru
|= try_cannot_fallthru
;
367 data
->cannot_fallthru
= false;
368 lower_sequence (gimple_catch_handler (stmt
), data
);
371 case GIMPLE_EH_FILTER
:
372 data
->cannot_fallthru
= false;
373 lower_sequence (gimple_eh_filter_failure (stmt
), data
);
381 case GIMPLE_EH_MUST_NOT_THROW
:
383 case GIMPLE_OMP_SECTIONS
:
384 case GIMPLE_OMP_SECTIONS_SWITCH
:
385 case GIMPLE_OMP_SECTION
:
386 case GIMPLE_OMP_SINGLE
:
387 case GIMPLE_OMP_MASTER
:
388 case GIMPLE_OMP_ORDERED
:
389 case GIMPLE_OMP_CRITICAL
:
390 case GIMPLE_OMP_RETURN
:
391 case GIMPLE_OMP_ATOMIC_LOAD
:
392 case GIMPLE_OMP_ATOMIC_STORE
:
393 case GIMPLE_OMP_CONTINUE
:
398 tree decl
= gimple_call_fndecl (stmt
);
401 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
402 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_SETJMP
)
404 lower_builtin_setjmp (gsi
);
405 data
->cannot_fallthru
= false;
406 data
->calls_builtin_setjmp
= true;
410 if (decl
&& (flags_from_decl_or_type (decl
) & ECF_NORETURN
))
412 data
->cannot_fallthru
= true;
419 case GIMPLE_OMP_PARALLEL
:
420 case GIMPLE_OMP_TASK
:
421 data
->cannot_fallthru
= false;
422 lower_omp_directive (gsi
, data
);
423 data
->cannot_fallthru
= false;
430 data
->cannot_fallthru
= false;
434 /* Lower a bind_expr TSI. DATA is passed through the recursion. */
437 lower_gimple_bind (gimple_stmt_iterator
*gsi
, struct lower_data
*data
)
439 tree old_block
= data
->block
;
440 gimple stmt
= gsi_stmt (*gsi
);
441 tree new_block
= gimple_bind_block (stmt
);
445 if (new_block
== old_block
)
447 /* The outermost block of the original function may not be the
448 outermost statement chain of the gimplified function. So we
449 may see the outermost block just inside the function. */
450 gcc_assert (new_block
== DECL_INITIAL (current_function_decl
));
455 /* We do not expect to handle duplicate blocks. */
456 gcc_assert (!TREE_ASM_WRITTEN (new_block
));
457 TREE_ASM_WRITTEN (new_block
) = 1;
459 /* Block tree may get clobbered by inlining. Normally this would
460 be fixed in rest_of_decl_compilation using block notes, but
461 since we are not going to emit them, it is up to us. */
462 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (old_block
);
463 BLOCK_SUBBLOCKS (old_block
) = new_block
;
464 BLOCK_SUBBLOCKS (new_block
) = NULL_TREE
;
465 BLOCK_SUPERCONTEXT (new_block
) = old_block
;
467 data
->block
= new_block
;
471 record_vars (gimple_bind_vars (stmt
));
472 lower_sequence (gimple_bind_body (stmt
), data
);
476 gcc_assert (data
->block
== new_block
);
478 BLOCK_SUBBLOCKS (new_block
)
479 = blocks_nreverse (BLOCK_SUBBLOCKS (new_block
));
480 data
->block
= old_block
;
483 /* The GIMPLE_BIND no longer carries any useful information -- kill it. */
484 gsi_insert_seq_before (gsi
, gimple_bind_body (stmt
), GSI_SAME_STMT
);
485 gsi_remove (gsi
, false);
488 /* Try to determine whether a TRY_CATCH expression can fall through.
489 This is a subroutine of block_may_fallthru. */
492 try_catch_may_fallthru (const_tree stmt
)
494 tree_stmt_iterator i
;
496 /* If the TRY block can fall through, the whole TRY_CATCH can
498 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
501 i
= tsi_start (TREE_OPERAND (stmt
, 1));
502 switch (TREE_CODE (tsi_stmt (i
)))
505 /* We expect to see a sequence of CATCH_EXPR trees, each with a
506 catch expression and a body. The whole TRY_CATCH may fall
507 through iff any of the catch bodies falls through. */
508 for (; !tsi_end_p (i
); tsi_next (&i
))
510 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
516 /* The exception filter expression only matters if there is an
517 exception. If the exception does not match EH_FILTER_TYPES,
518 we will execute EH_FILTER_FAILURE, and we will fall through
519 if that falls through. If the exception does match
520 EH_FILTER_TYPES, the stack unwinder will continue up the
521 stack, so we will not fall through. We don't know whether we
522 will throw an exception which matches EH_FILTER_TYPES or not,
523 so we just ignore EH_FILTER_TYPES and assume that we might
524 throw an exception which doesn't match. */
525 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
528 /* This case represents statements to be executed when an
529 exception occurs. Those statements are implicitly followed
530 by a RESX statement to resume execution after the exception.
531 So in this case the TRY_CATCH never falls through. */
537 /* Same as above, but for a GIMPLE_TRY_CATCH. */
540 gimple_try_catch_may_fallthru (gimple stmt
)
542 gimple_stmt_iterator i
;
544 /* We don't handle GIMPLE_TRY_FINALLY. */
545 gcc_assert (gimple_try_kind (stmt
) == GIMPLE_TRY_CATCH
);
547 /* If the TRY block can fall through, the whole TRY_CATCH can
549 if (gimple_seq_may_fallthru (gimple_try_eval (stmt
)))
552 i
= gsi_start (gimple_try_cleanup (stmt
));
553 switch (gimple_code (gsi_stmt (i
)))
556 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
557 catch expression and a body. The whole try/catch may fall
558 through iff any of the catch bodies falls through. */
559 for (; !gsi_end_p (i
); gsi_next (&i
))
561 if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i
))))
566 case GIMPLE_EH_FILTER
:
567 /* The exception filter expression only matters if there is an
568 exception. If the exception does not match EH_FILTER_TYPES,
569 we will execute EH_FILTER_FAILURE, and we will fall through
570 if that falls through. If the exception does match
571 EH_FILTER_TYPES, the stack unwinder will continue up the
572 stack, so we will not fall through. We don't know whether we
573 will throw an exception which matches EH_FILTER_TYPES or not,
574 so we just ignore EH_FILTER_TYPES and assume that we might
575 throw an exception which doesn't match. */
576 return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i
)));
579 /* This case represents statements to be executed when an
580 exception occurs. Those statements are implicitly followed
581 by a GIMPLE_RESX to resume execution after the exception. So
582 in this case the try/catch never falls through. */
588 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
589 need not be 100% accurate; simply be conservative and return true if we
590 don't know. This is used only to avoid stupidly generating extra code.
591 If we're wrong, we'll just delete the extra code later. */
594 block_may_fallthru (const_tree block
)
596 /* This CONST_CAST is okay because expr_last returns its argument
597 unmodified and we assign it to a const_tree. */
598 const_tree stmt
= expr_last (CONST_CAST_TREE(block
));
600 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
604 /* Easy cases. If the last statement of the block implies
605 control transfer, then we can't fall through. */
609 /* If SWITCH_LABELS is set, this is lowered, and represents a
610 branch to a selected label and hence can not fall through.
611 Otherwise SWITCH_BODY is set, and the switch can fall
613 return SWITCH_LABELS (stmt
) == NULL_TREE
;
616 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
618 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
621 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
624 return try_catch_may_fallthru (stmt
);
626 case TRY_FINALLY_EXPR
:
627 /* The finally clause is always executed after the try clause,
628 so if it does not fall through, then the try-finally will not
629 fall through. Otherwise, if the try clause does not fall
630 through, then when the finally clause falls through it will
631 resume execution wherever the try clause was going. So the
632 whole try-finally will only fall through if both the try
633 clause and the finally clause fall through. */
634 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
635 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
638 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
639 stmt
= TREE_OPERAND (stmt
, 1);
645 /* Functions that do not return do not fall through. */
646 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
648 case CLEANUP_POINT_EXPR
:
649 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
657 /* Try to determine if we can continue executing the statement
658 immediately following STMT. This guess need not be 100% accurate;
659 simply be conservative and return true if we don't know. This is
660 used only to avoid stupidly generating extra code. If we're wrong,
661 we'll just delete the extra code later. */
664 gimple_stmt_may_fallthru (gimple stmt
)
669 switch (gimple_code (stmt
))
674 /* Easy cases. If the last statement of the seq implies
675 control transfer, then we can't fall through. */
679 /* Switch has already been lowered and represents a branch
680 to a selected label and hence can't fall through. */
684 /* GIMPLE_COND's are already lowered into a two-way branch. They
685 can't fall through. */
689 return gimple_seq_may_fallthru (gimple_bind_body (stmt
));
692 if (gimple_try_kind (stmt
) == GIMPLE_TRY_CATCH
)
693 return gimple_try_catch_may_fallthru (stmt
);
695 /* It must be a GIMPLE_TRY_FINALLY. */
697 /* The finally clause is always executed after the try clause,
698 so if it does not fall through, then the try-finally will not
699 fall through. Otherwise, if the try clause does not fall
700 through, then when the finally clause falls through it will
701 resume execution wherever the try clause was going. So the
702 whole try-finally will only fall through if both the try
703 clause and the finally clause fall through. */
704 return (gimple_seq_may_fallthru (gimple_try_eval (stmt
))
705 && gimple_seq_may_fallthru (gimple_try_cleanup (stmt
)));
708 /* Functions that do not return do not fall through. */
709 return (gimple_call_flags (stmt
) & ECF_NORETURN
) == 0;
717 /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
720 gimple_seq_may_fallthru (gimple_seq seq
)
722 return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq
));
726 /* Lower a GIMPLE_RETURN GSI. DATA is passed through the recursion. */
729 lower_gimple_return (gimple_stmt_iterator
*gsi
, struct lower_data
*data
)
731 gimple stmt
= gsi_stmt (*gsi
);
734 return_statements_t tmp_rs
;
736 /* Match this up with an existing return statement that's been created. */
737 for (i
= VEC_length (return_statements_t
, data
->return_statements
) - 1;
740 tmp_rs
= *VEC_index (return_statements_t
, data
->return_statements
, i
);
742 if (gimple_return_retval (stmt
) == gimple_return_retval (tmp_rs
.stmt
))
744 /* Remove the line number from the representative return statement.
745 It now fills in for many such returns. Failure to remove this
746 will result in incorrect results for coverage analysis. */
747 gimple_set_location (tmp_rs
.stmt
, UNKNOWN_LOCATION
);
753 /* Not found. Create a new label and record the return statement. */
754 tmp_rs
.label
= create_artificial_label (cfun
->function_end_locus
);
756 VEC_safe_push (return_statements_t
, heap
, data
->return_statements
, &tmp_rs
);
758 /* Generate a goto statement and remove the return statement. */
760 /* When not optimizing, make sure user returns are preserved. */
761 if (!optimize
&& gimple_has_location (stmt
))
762 DECL_ARTIFICIAL (tmp_rs
.label
) = 0;
763 t
= gimple_build_goto (tmp_rs
.label
);
764 gimple_set_location (t
, gimple_location (stmt
));
765 gimple_set_block (t
, gimple_block (stmt
));
766 gsi_insert_before (gsi
, t
, GSI_SAME_STMT
);
767 gsi_remove (gsi
, false);
770 /* Lower a __builtin_setjmp GSI.
772 __builtin_setjmp is passed a pointer to an array of five words (not
773 all will be used on all machines). It operates similarly to the C
774 library function of the same name, but is more efficient.
776 It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
777 __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
778 __builtin_setjmp_dispatcher shared among all the instances; that's
779 why it is only emitted at the end by lower_function_body.
781 After full lowering, the body of the function should look like:
790 __builtin_setjmp_setup (&buf, &<D1847>);
794 __builtin_setjmp_receiver (&<D1847>);
797 if (D.1844 == 0) goto <D1848>; else goto <D1849>;
801 __builtin_setjmp_setup (&buf, &<D2847>);
805 __builtin_setjmp_receiver (&<D2847>);
808 if (D.2844 == 0) goto <D2848>; else goto <D2849>;
814 <D3853>: [non-local];
815 setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
819 The dispatcher block will be both the unique destination of all the
820 abnormal call edges and the unique source of all the abnormal edges
821 to the receivers, thus keeping the complexity explosion localized. */
824 lower_builtin_setjmp (gimple_stmt_iterator
*gsi
)
826 gimple stmt
= gsi_stmt (*gsi
);
827 location_t loc
= gimple_location (stmt
);
828 tree cont_label
= create_artificial_label (loc
);
829 tree next_label
= create_artificial_label (loc
);
833 /* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is
834 passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */
835 FORCED_LABEL (next_label
) = 1;
837 dest
= gimple_call_lhs (stmt
);
839 /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */
840 arg
= build_addr (next_label
, current_function_decl
);
841 t
= implicit_built_in_decls
[BUILT_IN_SETJMP_SETUP
];
842 g
= gimple_build_call (t
, 2, gimple_call_arg (stmt
, 0), arg
);
843 gimple_set_location (g
, loc
);
844 gimple_set_block (g
, gimple_block (stmt
));
845 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
847 /* Build 'DEST = 0' and insert. */
850 g
= gimple_build_assign (dest
, build_zero_cst (TREE_TYPE (dest
)));
851 gimple_set_location (g
, loc
);
852 gimple_set_block (g
, gimple_block (stmt
));
853 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
856 /* Build 'goto CONT_LABEL' and insert. */
857 g
= gimple_build_goto (cont_label
);
858 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
860 /* Build 'NEXT_LABEL:' and insert. */
861 g
= gimple_build_label (next_label
);
862 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
864 /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */
865 arg
= build_addr (next_label
, current_function_decl
);
866 t
= implicit_built_in_decls
[BUILT_IN_SETJMP_RECEIVER
];
867 g
= gimple_build_call (t
, 1, arg
);
868 gimple_set_location (g
, loc
);
869 gimple_set_block (g
, gimple_block (stmt
));
870 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
872 /* Build 'DEST = 1' and insert. */
875 g
= gimple_build_assign (dest
, fold_convert_loc (loc
, TREE_TYPE (dest
),
877 gimple_set_location (g
, loc
);
878 gimple_set_block (g
, gimple_block (stmt
));
879 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
882 /* Build 'CONT_LABEL:' and insert. */
883 g
= gimple_build_label (cont_label
);
884 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
886 /* Remove the call to __builtin_setjmp. */
887 gsi_remove (gsi
, false);
891 /* Record the variables in VARS into function FN. */
894 record_vars_into (tree vars
, tree fn
)
896 if (fn
!= current_function_decl
)
897 push_cfun (DECL_STRUCT_FUNCTION (fn
));
899 for (; vars
; vars
= DECL_CHAIN (vars
))
903 /* BIND_EXPRs contains also function/type/constant declarations
904 we don't need to care about. */
905 if (TREE_CODE (var
) != VAR_DECL
)
908 /* Nothing to do in this case. */
909 if (DECL_EXTERNAL (var
))
912 /* Record the variable. */
913 add_local_decl (cfun
, var
);
914 if (gimple_referenced_vars (cfun
))
915 add_referenced_var (var
);
918 if (fn
!= current_function_decl
)
923 /* Record the variables in VARS into current_function_decl. */
926 record_vars (tree vars
)
928 record_vars_into (vars
, current_function_decl
);