-/* Dead store elimination
+/* Dead and redundant store elimination
Copyright (C) 2004-2019 Free Software Foundation, Inc.
This file is part of GCC.
A dead store is a store into a memory location which will later be
overwritten by another store without any intervening loads. In this
- case the earlier store can be deleted.
+ case the earlier store can be deleted, or trimmed if it was
+ only partially dead.
+
+ A redundant store is a store into a memory location which stores
+ the exact same value as a prior store to the same memory location.
+ While this can often be handled by dead store elimination, removing
+ the redundant store is often better than removing or trimming the
+ dead store.
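+
+ For example (an illustrative sketch, not taken from any particular
+ test case; p is simply a pointer to a structure):
+
+   p->x = 1;      dead: overwritten by the next store with no
+   p->x = 2;      intervening load
+
+   p->y = 3;      the second store is redundant: it writes the same
+   p->y = 3;      value the first store already wrote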
In our SSA + virtual operand world we use immediate uses of virtual
- operands to detect dead stores. If a store's virtual definition
+ operands to detect these cases. If a store's virtual definition
is used precisely once by a later store to the same location which
- post dominates the first store, then the first store is dead.
+ post dominates the first store, then the first store is dead. If
+ the data stored is the same, then the second store is redundant.
The single use of the store's virtual definition ensures that
there are no intervening aliased loads and the requirement that
the second store post-dominate the first ensures that the first
store can be deleted.

It may help to think of this as first moving the earlier store to
the point immediately before the later store. Again, the single
use of the virtual definition and the post-dominance relationship
ensure that such movement would be safe. Clearly if there are
- back to back stores, then the second is redundant.
+ back to back stores, then the second makes the first dead. If
+ the second store stores the same value, then the second store is
+ redundant.
Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
may also help in understanding this code since it discusses the
relationship between dead store and redundant load elimination. In
fact, they are the same transformation applied to different views of
the CFG. */
+static void delete_dead_or_redundant_assignment (gimple_stmt_iterator *,
+						  const char *);
+static void delete_dead_or_redundant_call (gimple_stmt_iterator *,
+					    const char *);
/* Bitmap of blocks that have had EH statements cleaned. We should
remove their dead edges eventually. */
ao_ref_init_from_ptr_and_size (write, ptr, size);
return true;
}
+
+ /* A calloc call can never be dead, but it can make
+ subsequent stores redundant if they store 0 into
+ the same memory locations. */
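+
+      /* For example (an illustrative sketch, not part of the original
+	 change):
+
+	   p = calloc (n, sz);
+	   memset (p, 0, n * sz);
+
+	 assuming n and sz are compile-time constants, the memset only
+	 rewrites bytes the calloc already zeroed, so it is removed as
+	 redundant.  */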
+ case BUILT_IN_CALLOC:
+	{
+	  tree nelem = gimple_call_arg (stmt, 0);
+	  tree selem = gimple_call_arg (stmt, 1);
+	  tree lhs;
+	  if (TREE_CODE (nelem) == INTEGER_CST
+	      && TREE_CODE (selem) == INTEGER_CST
+	      /* If the result of the calloc call is unused, there is
+		 no pointer to build the ao_ref from.  */
+	      && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
+	    {
+	      tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
+				       nelem, selem);
+	      ao_ref_init_from_ptr_and_size (write, lhs, size);
+	      return true;
+	    }
+	}
+	break;
+
default:
break;
}
return true;
}
+/* STMT stores the value 0 into one or more memory locations
+ (via memset, empty constructor, calloc call, etc).
+
+ See if there is a subsequent store of the value 0 to one
+ or more of the same memory location(s). If so, the subsequent
+ store is redundant and can be removed.
+
+ The subsequent stores could be via memset, empty constructors,
+ simple MEM stores, etc. */
+
+static void
+dse_optimize_redundant_stores (gimple *stmt)
+{
+ int cnt = 0;
+
+ /* We could do something fairly complex and look through PHIs
+ like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
+ the effort.
+
+ Look at all the immediate uses of the VDEF (which are obviously
+ dominated by STMT). See if one or more stores 0 into the same
+ memory locations as STMT; if so, remove the immediate use statements. */
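+
+  /* As a rough illustration (schematic GIMPLE with virtual operands,
+     not an actual dump):
+
+       # .MEM_2 = VDEF <.MEM_1>
+       memset (p_1, 0, 32);          <- STMT
+       # .MEM_3 = VDEF <.MEM_2>
+       p_1->i = 0;                   <- immediate use of .MEM_2
+
+     STMT kills the later store of zero, so that store is removed as
+     redundant.  */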
+ tree defvar = gimple_vdef (stmt);
+ imm_use_iterator ui;
+ gimple *use_stmt;
+ FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
+ {
+ /* Limit stmt walking. */
+ if (++cnt > PARAM_VALUE (PARAM_DSE_MAX_ALIAS_QUERIES_PER_STORE))
+ BREAK_FROM_IMM_USE_STMT (ui);
+
+ /* If USE_STMT stores 0 into one or more of the same locations
+ as STMT and STMT would kill USE_STMT, then we can just remove
+ USE_STMT. */
+ tree fndecl;
+ if ((is_gimple_assign (use_stmt)
+ && gimple_vdef (use_stmt)
+ && ((gimple_assign_rhs_code (use_stmt) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (use_stmt)) == 0
+ && !gimple_clobber_p (use_stmt))
+ || (gimple_assign_rhs_code (use_stmt) == INTEGER_CST
+ && integer_zerop (gimple_assign_rhs1 (use_stmt)))))
+ || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
+ && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
+ && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
+ && integer_zerop (gimple_call_arg (use_stmt, 1))))
+ {
+ ao_ref write;
+
+ if (!initialize_ao_ref_for_dse (use_stmt, &write))
+ BREAK_FROM_IMM_USE_STMT (ui);
+
+ if (valid_ao_ref_for_dse (&write)
+ && stmt_kills_ref_p (stmt, &write))
+ {
+ gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
+ if (is_gimple_assign (use_stmt))
+ delete_dead_or_redundant_assignment (&gsi, "redundant");
+ else if (is_gimple_call (use_stmt))
+ delete_dead_or_redundant_call (&gsi, "redundant");
+ else
+ gcc_unreachable ();
+ }
+ }
+ }
+}
+
/* A helper of dse_optimize_stmt.
Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
according to downstream uses and defs. Sets *BY_CLOBBER_P to true
-/* Delete a dead call at GSI, which is mem* call of some kind. */
+/* Delete a dead or redundant call at GSI, which is a mem* call of
+   some kind.  TYPE is "dead" or "redundant" and is used only for
+   dump output.  */
static void
-delete_dead_call (gimple_stmt_iterator *gsi)
+delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
{
gimple *stmt = gsi_stmt (*gsi);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, " Deleted dead call: ");
+ fprintf (dump_file, " Deleted %s call: ", type);
print_gimple_stmt (dump_file, stmt, 0, dump_flags);
fprintf (dump_file, "\n");
}
-/* Delete a dead store at GSI, which is a gimple assignment. */
+/* Delete a dead or redundant store at GSI, which is a gimple
+   assignment.  TYPE is "dead" or "redundant" and is used only for
+   dump output.  */
static void
-delete_dead_assignment (gimple_stmt_iterator *gsi)
+delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
+				     const char *type)
{
gimple *stmt = gsi_stmt (*gsi);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, " Deleted dead store: ");
+ fprintf (dump_file, " Deleted %s store: ", type);
print_gimple_stmt (dump_file, stmt, 0, dump_flags);
fprintf (dump_file, "\n");
}
some builtin calls. */
if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
{
- switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
+ tree fndecl = gimple_call_fndecl (stmt);
+ switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
tree size = gimple_call_arg (stmt, 2);
if (integer_zerop (size))
{
- delete_dead_call (gsi);
+ delete_dead_or_redundant_call (gsi, "dead");
return;
}
+ /* If this memset call initializes an object to zero, it can make
+    subsequent stores of zero into the same memory redundant.  Look
+    for and remove any such stores dominated by this call. */
+ if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
+ && integer_zerop (gimple_call_arg (stmt, 1)))
+ dse_optimize_redundant_stores (stmt);
+
enum dse_store_status store_status;
m_byte_tracking_enabled
= setup_live_bytes_from_ref (&ref, m_live_bytes);
}
if (store_status == DSE_STORE_DEAD)
- delete_dead_call (gsi);
+ delete_dead_or_redundant_call (gsi, "dead");
return;
}
+ case BUILT_IN_CALLOC:
+   /* We already know the arguments are integer constants. */
+   dse_optimize_redundant_stores (stmt);
+   return;
+
default:
return;
}
{
bool by_clobber_p = false;
+ /* First see if this store is an empty CONSTRUCTOR and whether there
+    are subsequent stores of zero into the same memory which are
+    totally subsumed by this statement.  If so, remove those
+    subsequent stores.
+
+    This tends to result in fewer but larger memset calls when the
+    zero-initializations are eventually expanded. */
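+
+ /* For instance (an illustrative sketch only), given source like
+
+      struct S s = { };
+      s.a = 0;
+
+    the second store writes memory the empty CONSTRUCTOR already
+    zeroed, so it is removed as redundant. */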
+ if (gimple_assign_rhs_code (stmt) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) == 0
+ && !gimple_clobber_p (stmt))
+ dse_optimize_redundant_stores (stmt);
+
/* Self-assignments are zombies. */
if (operand_equal_p (gimple_assign_rhs1 (stmt),
gimple_assign_lhs (stmt), 0))
&& !by_clobber_p)
return;
- delete_dead_assignment (gsi);
+ delete_dead_or_redundant_assignment (gsi, "dead");
}
}