gcc/gimple-low.c
/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement, and duplicates are replaced with gotos to the
      unique return site.  */
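/* As a hand-written illustration (not an actual compiler dump), a
   High GIMPLE body such as

     {
       int i;

       i = x + 1;
       if (i > 0) goto <D.1>; else goto <D.2>;
       <D.1>:
       return i;
       <D.2>:
       return i;
     }

   is lowered into a flat sequence in which the GIMPLE_BIND is gone
   (its variables having been recorded with record_vars) and the two
   identical returns share a single return site:

     i = x + 1;
     if (i > 0) goto <D.1>; else goto <D.2>;
     <D.1>:
     goto <D.3>;
     <D.2>:
     goto <D.3>;
     <D.3>:
     return i;

   The label names are illustrative only.  */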

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                             data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};



/* Verify that the types of the call arguments of STMT match those of
   the function declaration FNDECL.  If we cannot verify this or there
   is a mismatch, return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify that the type of each argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case of
             excess parameters; still, deferring the inlining decision
             to the callee is possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function, defer the inlining decision
             to the callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}

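/* As a hypothetical illustration (not from real GCC output), given a
   declaration such as

     void f (int *);

   a direct call whose sole argument is, say, a 'double' is neither
   compatible with nor foldable to 'int *', so gimple_check_call_args
   returns false; gimple_check_call_matching_types below relies on this
   to flag such mismatched direct calls.  */
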
/* Verify that the types of the arguments and of the LHS of CALL_STMT
   match those of the function declaration CALLEE.  If we cannot verify
   this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the
   lowering routine must do it explicitly.  DATA is passed through the
   recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      {
        bool try_cannot_fallthru;
        lower_sequence (gimple_try_eval_ptr (stmt), data);
        try_cannot_fallthru = data->cannot_fallthru;
        data->cannot_fallthru = false;
        lower_sequence (gimple_try_cleanup_ptr (stmt), data);
        /* See gimple_stmt_may_fallthru for the rationale.  */
        if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          {
            data->cannot_fallthru |= try_cannot_fallthru;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler_ptr (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence cannot fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}

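/* For instance (illustrative only), a block ending in

     if (x) foo (); else return;

   may fall through via the then-arm, whereas a block ending in a bare
   'return;' or in a call to a noreturn function such as abort cannot.  */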

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
        add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}