/* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
- Copyright (C) 2003-2014 Free Software Foundation, Inc.
+ Copyright (C) 2003-2021 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
#include "tree.h"
+#include "gimple.h"
+#include "tree-pass.h"
+#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
-#include "basic-block.h"
-#include "tree-ssa-alias.h"
-#include "internal-fn.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
#include "gimple-iterator.h"
-#include "tree-iterator.h"
-#include "tree-inline.h"
-#include "flags.h"
-#include "function.h"
-#include "diagnostic-core.h"
-#include "tree-pass.h"
-#include "langhooks.h"
#include "gimple-low.h"
-#include "tree-nested.h"
+#include "predict.h"
+#include "gimple-predict.h"
+#include "gimple-fold.h"
/* The differences between High GIMPLE and Low GIMPLE are the
following:
struct return_statements_t
{
tree label;
- gimple stmt;
+ greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
gimple_seq body = gimple_body (current_function_decl);
gimple_seq lowered_body;
gimple_stmt_iterator i;
- gimple bind;
- gimple x;
+ gimple *bind;
+ gimple *x;
/* The gimplifier should've left a body of exactly one statement,
namely a GIMPLE_BIND. */
i = gsi_last (lowered_body);
+ /* If we had begin stmt markers from e.g. PCH, but this compilation
+ doesn't want them, lower_stmt will have cleaned them up; we can
+ now clear the flag that indicates we had them. */
+ if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
+ {
+ /* This counter need not be exact, but before lowering it will
+ most certainly be. */
+ gcc_assert (cfun->debug_marker_count == 0);
+ cfun->debug_nonbind_markers = false;
+ }
+
/* If the function falls off the end, we need a null return statement.
If we've already got one in the return_statements vector, we don't
need to do anything special. Otherwise build one by hand. */
- if (gimple_seq_may_fallthru (lowered_body)
+ bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
+ if (may_fallthru
&& (data.return_statements.is_empty ()
|| (gimple_return_retval (data.return_statements.last().stmt)
!= NULL)))
gimple_set_location (x, cfun->function_end_locus);
gimple_set_block (x, DECL_INITIAL (current_function_decl));
gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
+ may_fallthru = false;
}
/* If we lowered any return statements, emit the representative
x = gimple_build_label (t.label);
gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
+ if (may_fallthru)
+ {
+ /* Remove the line number from the representative return statement.
+ It now fills in for the fallthru too. Failure to remove this
+ will result in incorrect results for coverage analysis. */
+ gimple_set_location (t.stmt, UNKNOWN_LOCATION);
+ may_fallthru = false;
+ }
}
/* Once the old body has been lowered, replace it with the new
static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt;
+ gimple *stmt;
stmt = gsi_stmt (*gsi);
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_set_block (stmt, data->block);
return;
case GIMPLE_EH_ELSE:
- lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
- lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
+ lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
+ }
break;
+ case GIMPLE_DEBUG:
+ gcc_checking_assert (cfun->debug_nonbind_markers);
+ /* We can't possibly have debug bind stmts before lowering; we
+ first emit them when entering SSA. */
+ gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
+ /* Propagate fallthruness. */
+ /* If the function (e.g. from PCH) had debug stmts, but they're
+ disabled for this compilation, remove them. */
+ if (!MAY_HAVE_DEBUG_MARKER_STMTS)
+ gsi_remove (gsi, true);
+ else
+ gsi_next (gsi);
+ return;
+
case GIMPLE_NOP:
case GIMPLE_ASM:
case GIMPLE_ASSIGN:
case GIMPLE_OMP_MASTER:
case GIMPLE_OMP_TASKGROUP:
case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_SCAN:
case GIMPLE_OMP_CRITICAL:
case GIMPLE_OMP_RETURN:
case GIMPLE_OMP_ATOMIC_LOAD:
}
if (decl
- && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
{
if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
{
return;
}
else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
- && flag_tree_bit_ccp)
+ && flag_tree_bit_ccp
+ && gimple_builtin_call_types_compatible_p (stmt, decl))
{
lower_builtin_posix_memalign (gsi);
return;
gsi_next (gsi);
return;
}
+
+ /* We delay folding of builtin calls from gimplification to
+ here so the IL is in a consistent state for the diagnostic
+ machinery's job. */
+ if (gimple_call_builtin_p (stmt))
+ fold_stmt (gsi);
}
break;
return;
case GIMPLE_TRANSACTION:
- lower_sequence (gimple_transaction_body_ptr (stmt), data);
+ lower_sequence (gimple_transaction_body_ptr (
+ as_a <gtransaction *> (stmt)),
+ data);
break;
default:
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
tree old_block = data->block;
- gimple stmt = gsi_stmt (*gsi);
+ gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
tree new_block = gimple_bind_block (stmt);
if (new_block)
}
record_vars (gimple_bind_vars (stmt));
+
+ /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
+ need gimple_bind_vars. */
+ tree next;
+ /* BLOCK_VARS and gimple_bind_vars share a common sub-chain. Find
+ it by marking all BLOCK_VARS. */
+ if (gimple_bind_block (stmt))
+ for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
+ TREE_VISITED (t) = 1;
+ for (tree var = gimple_bind_vars (stmt);
+ var && ! TREE_VISITED (var); var = next)
+ {
+ next = DECL_CHAIN (var);
+ DECL_CHAIN (var) = NULL_TREE;
+ }
+ /* Unmark BLOCK_VARS. */
+ if (gimple_bind_block (stmt))
+ for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
+ TREE_VISITED (t) = 0;
+
lower_sequence (gimple_bind_body_ptr (stmt), data);
if (new_block)
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
bool cannot_fallthru;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_stmt_iterator i;
/* We don't handle GIMPLE_TRY_FINALLY. */
for (; !gsi_end_p (i); gsi_next (&i))
{
data->cannot_fallthru = false;
- lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
+ lower_sequence (gimple_catch_handler_ptr (
+ as_a <gcatch *> (gsi_stmt (i))),
+ data);
if (!data->cannot_fallthru)
cannot_fallthru = false;
}
cannot_fallthru = false;
break;
+ case GIMPLE_DEBUG:
+ gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
+ break;
+
default:
/* This case represents statements to be executed when an
exception occurs. Those statements are implicitly followed
This is a subroutine of gimple_stmt_may_fallthru. */
static bool
-gimple_try_catch_may_fallthru (gimple stmt)
+gimple_try_catch_may_fallthru (gtry *stmt)
{
gimple_stmt_iterator i;
through iff any of the catch bodies falls through. */
for (; !gsi_end_p (i); gsi_next (&i))
{
- if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
+ if (gimple_seq_may_fallthru (gimple_catch_handler (
+ as_a <gcatch *> (gsi_stmt (i)))))
return true;
}
return false;
we'll just delete the extra code later. */
bool
-gimple_stmt_may_fallthru (gimple stmt)
+gimple_stmt_may_fallthru (gimple *stmt)
{
if (!stmt)
return true;
return false;
case GIMPLE_BIND:
- return gimple_seq_may_fallthru (gimple_bind_body (stmt));
+ return gimple_seq_may_fallthru (
+ gimple_bind_body (as_a <gbind *> (stmt)));
case GIMPLE_TRY:
if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
- return gimple_try_catch_may_fallthru (stmt);
+ return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
/* It must be a GIMPLE_TRY_FINALLY. */
&& gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
case GIMPLE_EH_ELSE:
- return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
- || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
+ || gimple_seq_may_fallthru (gimple_eh_else_e_body (
+ eh_else_stmt)));
+ }
case GIMPLE_CALL:
/* Functions that do not return do not fall through. */
- return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;
+ return !gimple_call_noreturn_p (stmt);
default:
return true;
bool
gimple_seq_may_fallthru (gimple_seq seq)
{
- return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
+ return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple t;
+ greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
+ gimple *t;
int i;
return_statements_t tmp_rs;
if (!optimize && gimple_has_location (stmt))
DECL_ARTIFICIAL (tmp_rs.label) = 0;
t = gimple_build_goto (tmp_rs.label);
+ /* location includes block. */
gimple_set_location (t, gimple_location (stmt));
- gimple_set_block (t, gimple_block (stmt));
gsi_insert_before (gsi, t, GSI_SAME_STMT);
gsi_remove (gsi, false);
}
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree cont_label = create_artificial_label (loc);
tree next_label = create_artificial_label (loc);
tree dest, t, arg;
- gimple g;
+ gimple *g;
/* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
these builtins are modelled as non-local label jumps to the label
that is passed to these two builtins, so pretend we have a non-local
label during GIMPLE passes too. See PR60003. */
- cfun->has_nonlocal_label = true;
+ cfun->has_nonlocal_label = 1;
/* NEXT_LABEL is the label __builtin_longjmp will jump to. Its address is
passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver. */
FORCED_LABEL (next_label) = 1;
- dest = gimple_call_lhs (stmt);
+ tree orig_dest = dest = gimple_call_lhs (stmt);
+ if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
+ dest = create_tmp_reg (TREE_TYPE (orig_dest));
/* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert. */
- arg = build_addr (next_label, current_function_decl);
+ arg = build_addr (next_label);
t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
+ /* location includes block. */
gimple_set_location (g, loc);
- gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Build 'DEST = 0' and insert. */
{
g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
gimple_set_location (g, loc);
- gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
}
gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert. */
- arg = build_addr (next_label, current_function_decl);
+ arg = build_addr (next_label);
t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
g = gimple_build_call (t, 1, arg);
gimple_set_location (g, loc);
- gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Build 'DEST = 1' and insert. */
g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
integer_one_node));
gimple_set_location (g, loc);
- gimple_set_block (g, gimple_block (stmt));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
}
g = gimple_build_label (cont_label);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
+ /* Build orig_dest = dest if necessary. */
+ if (dest != orig_dest)
+ {
+ g = gimple_build_assign (orig_dest, dest);
+ gsi_insert_before (gsi, g, GSI_SAME_STMT);
+ }
+
/* Remove the call to __builtin_setjmp. */
gsi_remove (gsi, false);
}
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
- gimple stmt, call = gsi_stmt (*gsi);
+ gimple *stmt, *call = gsi_stmt (*gsi);
tree pptr = gimple_call_arg (call, 0);
tree align = gimple_call_arg (call, 1);
tree res = gimple_call_lhs (call);
- tree ptr = create_tmp_reg (ptr_type_node, NULL);
+ tree ptr = create_tmp_reg (ptr_type_node);
if (TREE_CODE (pptr) == ADDR_EXPR)
{
- tree tem = create_tmp_var (ptr_type_node, NULL);
+ tree tem = create_tmp_var (ptr_type_node);
TREE_ADDRESSABLE (tem) = 1;
gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
stmt = gimple_build_assign (ptr, tem);
build_int_cst (ptr_type_node, 0)));
if (res == NULL_TREE)
{
- res = create_tmp_reg (integer_type_node, NULL);
+ res = create_tmp_reg (integer_type_node);
gimple_call_set_lhs (call, res);
}
tree align_label = create_artificial_label (UNKNOWN_LOCATION);
tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
+ gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
align_label, noalign_label);
gsi_insert_after (gsi, cond, GSI_NEW_STMT);
gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
/* BIND_EXPRs contains also function/type/constant declarations
we don't need to care about. */
- if (TREE_CODE (var) != VAR_DECL)
+ if (!VAR_P (var))
continue;
/* Nothing to do in this case. */