/* RTL dead store elimination.
- Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
- Free Software Foundation, Inc.
+ Copyright (C) 2005-2014 Free Software Foundation, Inc.
Contributed by Richard Sandiford <rsandifor@codesourcery.com>
and Kenneth Zadeck <zadeck@naturalbridge.com>
#include "tm.h"
#include "rtl.h"
#include "tree.h"
+#include "stor-layout.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "dbgcnt.h"
#include "target.h"
#include "params.h"
-#include "tree-flow.h" /* for may_be_aliased */
+#include "pointer-set.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-expr.h"
+#include "is-a.h"
+#include "gimple.h"
+#include "gimple-ssa.h"
/* This file contains three techniques for performing Dead Store
Elimination (dse).
/* Index into the rtx_group_vec. */
static int rtx_group_next_id;
-DEF_VEC_P(group_info_t);
-DEF_VEC_ALLOC_P(group_info_t,heap);
-static VEC(group_info_t,heap) *rtx_group_vec;
+static vec<group_info_t> rtx_group_vec;
/* This structure holds the set of changes that are being deferred
static deferred_change_t deferred_change_list = NULL;
-/* This are used to hold the alias sets of spill variables. Since
- these are never aliased and there may be a lot of them, it makes
- sense to treat them specially. This bitvector is only allocated in
- calls from dse_record_singleton_alias_set which currently is only
- made during reload1. So when dse is called before reload this
- mechanism does nothing. */
-
-static bitmap clear_alias_sets = NULL;
-
-/* The set of clear_alias_sets that have been disqualified because
- there are loads or stores using a different mode than the alias set
- was registered with. */
-static bitmap disqualified_clear_alias_sets = NULL;
-
/* The group that holds all of the clear_alias_sets. */
static group_info_t clear_alias_group;
enum machine_mode mode;
};
-static alloc_pool clear_alias_mode_pool;
-
/* This is true except if cfun->stdarg -- i.e. we cannot do
this for vararg functions because they play games with the frame. */
static bool stores_off_frame_dead_at_return;
struct invariant_group_base_hasher : typed_noop_remove <group_info>
{
- typedef group_info T;
- static inline hashval_t hash (const T *);
- static inline bool equal (const T *, const T *);
+ typedef group_info value_type;
+ typedef group_info compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
};
inline bool
-invariant_group_base_hasher::equal (const T *gi1, const T *gi2)
+invariant_group_base_hasher::equal (const value_type *gi1,
+ const compare_type *gi2)
{
return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}
inline hashval_t
-invariant_group_base_hasher::hash (const T *gi)
+invariant_group_base_hasher::hash (const value_type *gi)
{
int do_not_record;
return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
gi->offset_map_size_p = 0;
gi->offset_map_n = NULL;
gi->offset_map_p = NULL;
- VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
+ rtx_group_vec.safe_push (gi);
}
return clear_alias_group;
}
gi->offset_map_size_p = 0;
gi->offset_map_n = NULL;
gi->offset_map_p = NULL;
- VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
+ rtx_group_vec.safe_push (gi);
}
return gi;
rtx_group_table.create (11);
- bb_table = XNEWVEC (bb_info_t, last_basic_block);
+ bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
rtx_group_next_id = 0;
stores_off_frame_dead_at_return = !cfun->stdarg;
init_alias_analysis ();
- if (clear_alias_sets)
- clear_alias_group = get_group_info (NULL);
- else
- clear_alias_group = NULL;
+ clear_alias_group = NULL;
}
if (!check_for_inc_dec_1 (insn_info))
return;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Locally deleting insn %d ",
INSN_UID (insn_info->insn));
insn_info->wild_read = false;
}
-/* Check if EXPR can possibly escape the current function scope. */
+/* Return whether DECL, a local variable, can possibly escape the current
+   function scope.  */
+
+static bool
+local_variable_can_escape (tree decl)
+{
+  /* A variable whose address has been taken can be reached through
+     pointers and therefore escapes trivially.  */
+  if (TREE_ADDRESSABLE (decl))
+    return true;
+
+  /* If this is a partitioned variable, we need to consider all the variables
+     in the partition.  This is necessary because a store into one of them can
+     be replaced with a store into another and this may not change the outcome
+     of the escape analysis.  */
+  if (cfun->gimple_df->decls_to_pointers != NULL)
+    {
+      void *namep
+	= pointer_map_contains (cfun->gimple_df->decls_to_pointers, decl);
+      /* The partition representative's addressability stands in for the
+	 whole partition.  */
+      if (namep)
+	return TREE_ADDRESSABLE (*(tree *)namep);
+    }
+
+  return false;
+}
+
+/* Return whether EXPR can possibly escape the current function scope. */
+
static bool
can_escape (tree expr)
{
return true;
base = get_base_address (expr);
if (DECL_P (base)
- && !may_be_aliased (base))
+ && !may_be_aliased (base)
+ && !(TREE_CODE (base) == VAR_DECL
+ && !DECL_EXTERNAL (base)
+ && !TREE_STATIC (base)
+ && local_variable_can_escape (base)))
return false;
return true;
}
rtx expanded_address, address;
int expanded;
- /* Make sure that cselib is has initialized all of the operands of
- the address before asking it to do the subst. */
-
- if (clear_alias_sets)
- {
- /* If this is a spill, do not do any further processing. */
- alias_set_type alias_set = MEM_ALIAS_SET (mem);
- if (dump_file)
- fprintf (dump_file, "found alias set %d\n", (int) alias_set);
- if (bitmap_bit_p (clear_alias_sets, alias_set))
- {
- struct clear_alias_mode_holder *entry
- = clear_alias_set_lookup (alias_set);
-
- /* If the modes do not match, we cannot process this set. */
- if (entry->mode != GET_MODE (mem))
- {
- if (dump_file)
- fprintf (dump_file,
- "disqualifying alias set %d, (%s) != (%s)\n",
- (int) alias_set, GET_MODE_NAME (entry->mode),
- GET_MODE_NAME (GET_MODE (mem)));
-
- bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
- return false;
- }
-
- *alias_set_out = alias_set;
- *group_id = clear_alias_group->id;
- return true;
- }
- }
-
*alias_set_out = 0;
cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " mem: ");
print_inline_rtx (dump_file, mem_address, 0);
*offset = 0;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
if (expanded)
{
{
group_info_t group = get_group_info (address);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " gid=%d offset=%d \n",
group->id, (int)*offset);
*base = NULL;
if (*base == NULL)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " no cselib val - should be a wild read.\n");
return false;
}
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
(*base)->uid, (*base)->hash, (int)*offset);
return true;
{
if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
add_wild_read (bb_info);
insn_info->cannot_delete = true;
if (GET_MODE (mem) == BLKmode)
width = MEM_SIZE (mem);
else
- {
- width = GET_MODE_SIZE (GET_MODE (mem));
- gcc_assert ((unsigned) width <= HOST_BITS_PER_WIDE_INT);
- }
+ width = GET_MODE_SIZE (GET_MODE (mem));
if (spill_alias_set)
{
store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " processing spill store %d(%s)\n",
(int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
}
frame pointer we can do global analysis. */
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, group_id);
+ = rtx_group_vec[group_id];
tree expr = MEM_EXPR (mem);
store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
set_usage_bits (group, offset, width, expr);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
group_id, (int)offset, (int)(offset+width));
}
store_info = (store_info_t) pool_alloc (cse_store_info_pool);
group_id = -1;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " processing cselib store [%d..%d)\n",
(int)offset, (int)(offset+width));
}
else
{
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, group_id);
+ = rtx_group_vec[group_id];
mem_addr = group->canon_base_addr;
}
if (offset)
del = true;
set_all_positions_unneeded (s_info);
}
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
INSN_UID (ptr->insn), (int) s_info->alias_set);
}
&& (s_info->cse_base == base))
{
HOST_WIDE_INT i;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
INSN_UID (ptr->insn), s_info->group_id,
(int)s_info->begin, (int)s_info->end);
in cache, so it is not going to be an expensive one. Thus, we
are not willing to do a multi insn shift or worse a subroutine
call to get rid of the read. */
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "trying to replace %smode load in insn %d"
" from %smode store in insn %d\n",
GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
if (read_reg == NULL_RTX)
{
end_sequence ();
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " -- could not extract bits of stored value\n");
return false;
}
bitmap_and_into (regs_set, regs_live);
if (!bitmap_empty_p (regs_set))
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file,
"abandoning replacement because sequence clobbers live hardregs:");
rest of dse, play like this read never happened. */
read_insn->read_rec = read_info->next;
pool_free (read_info_pool, read_info);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " -- replaced the loaded MEM with ");
print_simple_rtl (dump_file, read_reg);
}
else
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " -- replacing the loaded MEM with ");
print_simple_rtl (dump_file, read_reg);
if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
|| (MEM_VOLATILE_P (mem)))
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " adding wild read, volatile or barrier.\n");
add_wild_read (bb_info);
insn_info->cannot_delete = true;
if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " adding wild read, canon_address failure.\n");
add_wild_read (bb_info);
return 0;
else
{
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, group_id);
+ = rtx_group_vec[group_id];
mem_addr = group->canon_base_addr;
}
if (offset)
insn_info_t i_ptr = active_local_stores;
insn_info_t last = NULL;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " processing spill load %d\n",
(int) spill_alias_set);
if (store_info->alias_set == spill_alias_set)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
dump_insn_info ("removing from active", i_ptr);
active_local_stores_len--;
insn_info_t i_ptr = active_local_stores;
insn_info_t last = NULL;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
if (width == -1)
fprintf (dump_file, " processing const load gid=%d[BLK]\n",
if (remove)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
dump_insn_info ("removing from active", i_ptr);
active_local_stores_len--;
{
insn_info_t i_ptr = active_local_stores;
insn_info_t last = NULL;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " processing cselib load mem:");
print_inline_rtx (dump_file, mem, 0);
bool remove = false;
store_info_t store_info = i_ptr->store_rec;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " processing cselib load against insn %d\n",
INSN_UID (i_ptr->insn));
if (remove)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
dump_insn_info ("removing from active", i_ptr);
active_local_stores_len--;
int mems_found = 0;
memset (insn_info, 0, sizeof (struct insn_info));
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\n**scanning insn=%d\n",
INSN_UID (insn));
return;
}
- /* Cselib clears the table for this case, so we have to essentially
- do the same. */
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
- && MEM_VOLATILE_P (PATTERN (insn)))
- {
- add_wild_read (bb_info);
- insn_info->cannot_delete = true;
- return;
- }
-
/* Look at all of the uses in the insn. */
note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
const_call = RTL_CONST_CALL_P (insn);
if (!const_call)
{
- rtx call = PATTERN (insn);
- if (GET_CODE (call) == PARALLEL)
- call = XVECEXP (call, 0, 0);
- if (GET_CODE (call) == SET)
- call = SET_SRC (call);
- if (GET_CODE (call) == CALL
- && MEM_P (XEXP (call, 0))
- && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
+ rtx call = get_call_rtx_from (insn);
+ if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
{
rtx symbol = XEXP (XEXP (call, 0), 0);
if (SYMBOL_REF_DECL (symbol)
insn_info_t i_ptr = active_local_stores;
insn_info_t last = NULL;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "%s call %d\n",
const_call ? "const" : "memset", INSN_UID (insn));
store_info = store_info->next;
if (store_info->group_id >= 0
- && VEC_index (group_info_t, rtx_group_vec,
- store_info->group_id)->frame_related)
+ && rtx_group_vec[store_info->group_id]->frame_related)
remove_store = true;
}
if (remove_store)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
dump_insn_info ("removing from active", i_ptr);
active_local_stores_len--;
set_mem_size (mem, INTVAL (args[2]));
body = gen_rtx_SET (VOIDmode, mem, args[1]);
mems_found += record_store (body, bb_info);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "handling memset as BLKmode store\n");
if (mems_found == 1)
{
else
mems_found += record_store (body, bb_info);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
mems_found, insn_info->cannot_delete ? "true" : "false");
bitmap_set_bit (all_blocks, ENTRY_BLOCK);
bitmap_set_bit (all_blocks, EXIT_BLOCK);
- FOR_ALL_BB (bb)
+ FOR_ALL_BB_FN (bb, cfun)
{
insn_info_t ptr;
bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
if (stores_off_frame_dead_at_return
&& (EDGE_COUNT (bb->succs) == 0
|| (single_succ_p (bb)
- && single_succ (bb) == EXIT_BLOCK_PTR
+ && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
&& ! crtl->calls_eh_return)))
{
insn_info_t i_ptr = active_local_stores;
if (store_info->group_id >= 0)
{
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
+ = rtx_group_vec[store_info->group_id];
if (group->frame_related && !i_ptr->cannot_delete)
delete_dead_store_insn (i_ptr);
}
&& s_info->redundant_reason->insn
&& !ptr->cannot_delete)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Locally deleting insn %d "
"because insn %d stores the "
"same value and couldn't be "
INSN_UID (s_info->redundant_reason->insn));
delete_dead_store_insn (ptr);
}
- if (s_info)
- s_info->redundant_reason = NULL;
free_store_info (ptr);
}
else
unsigned int i;
group_info_t group;
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
/* For all non stack related bases, we only consider a store to
be deletable if there are two or more stores for that
{
bitmap_ior_into (group->store2_n, group->store1_n);
bitmap_ior_into (group->store2_p, group->store1_p);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "group %d is frame related ", i);
}
group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
group->offset_map_size_p);
group->process_globally = false;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "group %d(%d+%d): ", i,
(int)bitmap_count_bits (group->store2_n),
/* Position 0 is unused because 0 is used in the maps to mean
unused. */
current_position = 1;
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
bitmap_iterator bi;
unsigned int j;
if (group == clear_alias_group)
continue;
- memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
- memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
+ memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
+ memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
bitmap_clear (group->group_kill);
EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
}
-/* Init the offset tables for the spill case. */
-
-static bool
-dse_step2_spill (void)
-{
- unsigned int j;
- group_info_t group = clear_alias_group;
- bitmap_iterator bi;
-
- /* Position 0 is unused because 0 is used in the maps to mean
- unused. */
- current_position = 1;
-
- if (dump_file)
- {
- bitmap_print (dump_file, clear_alias_sets,
- "clear alias sets ", "\n");
- bitmap_print (dump_file, disqualified_clear_alias_sets,
- "disqualified clear alias sets ", "\n");
- }
-
- memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
- memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
- bitmap_clear (group->group_kill);
-
- /* Remove the disqualified positions from the store2_p set. */
- bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
-
- /* We do not need to process the store2_n set because
- alias_sets are always positive. */
- EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
- {
- bitmap_set_bit (group->group_kill, current_position);
- group->offset_map_p[j] = current_position++;
- group->process_globally = true;
- }
-
- return current_position != 1;
-}
-
-
\f
/*----------------------------------------------------------------------------
Third step.
{
HOST_WIDE_INT i;
group_info_t group_info
- = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
+ = rtx_group_vec[store_info->group_id];
if (group_info->process_globally)
for (i = store_info->begin; i < store_info->end; i++)
{
/* If this insn reads the frame, kill all the frame related stores. */
if (insn_info->frame_read)
{
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
if (group->process_globally && group->frame_related)
{
if (kill)
if (kill)
bitmap_ior_into (kill, kill_on_calls);
bitmap_and_compl_into (gen, kill_on_calls);
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
if (group->process_globally && !group->frame_related)
{
if (kill)
}
while (read_info)
{
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
if (group->process_globally)
{
unsigned int i;
group_info_t group;
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
if (group->process_globally && group->frame_related)
bitmap_ior_into (bb_info->gen, group->group_kill);
edge e;
edge_iterator ei;
- if (TEST_BIT (unreachable_blocks, bb->index))
+ if (bitmap_bit_p (unreachable_blocks, bb->index))
{
- RESET_BIT (unreachable_blocks, bb->index);
+ bitmap_clear_bit (unreachable_blocks, bb->index);
FOR_EACH_EDGE (e, ei, bb->preds)
{
mark_reachable_blocks (unreachable_blocks, e->src);
dse_step3 (bool for_spills)
{
basic_block bb;
- sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
+ sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
sbitmap_iterator sbi;
bitmap all_ones = NULL;
unsigned int i;
- sbitmap_ones (unreachable_blocks);
+ bitmap_ones (unreachable_blocks);
- FOR_ALL_BB (bb)
+ FOR_ALL_BB_FN (bb, cfun)
{
bb_info_t bb_info = bb_table[bb->index];
if (bb_info->gen)
/* For any block in an infinite loop, we must initialize the out set
to all ones. This could be expensive, but almost never occurs in
practice. However, it is common in regression tests. */
- EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
+ EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
{
if (bitmap_bit_p (all_blocks, i))
{
group_info_t group;
all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
bitmap_ior_into (all_ones, group->group_kill);
}
if (!bb_info->out)
dse_confluence_n, dse_transfer_function,
all_blocks, df_get_postorder (DF_BACKWARD),
df_get_n_blocks (DF_BACKWARD));
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
{
basic_block bb;
fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
- FOR_ALL_BB (bb)
+ FOR_ALL_BB_FN (bb, cfun)
{
bb_info_t bb_info = bb_table[bb->index];
dse_step5_nospill (void)
{
basic_block bb;
- FOR_EACH_BB (bb)
+ FOR_EACH_BB_FN (bb, cfun)
{
bb_info_t bb_info = bb_table[bb->index];
insn_info_t insn_info = bb_info->last_insn;
{
HOST_WIDE_INT i;
group_info_t group_info
- = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
+ = rtx_group_vec[store_info->group_id];
for (i = store_info->begin; i < store_info->end; i++)
{
int index = get_bitmap_index (group_info, i);
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
if (index == 0 || !bitmap_bit_p (v, index))
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "failing at i = %d\n", (int)i);
deleted = false;
break;
scan_stores_nospill (insn_info->store_rec, v, NULL);
if (insn_info->wild_read)
{
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "wild read\n");
bitmap_clear (v);
}
{
if (dump_file && !insn_info->non_frame_wild_read)
fprintf (dump_file, "regular read\n");
- else if (dump_file)
+ else if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "non-frame wild read\n");
scan_reads_nospill (insn_info, v, NULL);
}
}
-static void
-dse_step5_spill (void)
-{
- basic_block bb;
- FOR_EACH_BB (bb)
- {
- bb_info_t bb_info = bb_table[bb->index];
- insn_info_t insn_info = bb_info->last_insn;
- bitmap v = bb_info->out;
-
- while (insn_info)
- {
- bool deleted = false;
- /* There may have been code deleted by the dce pass run before
- this phase. */
- if (insn_info->insn
- && INSN_P (insn_info->insn)
- && (!insn_info->cannot_delete)
- && (!bitmap_empty_p (v)))
- {
- /* Try to delete the current insn. */
- store_info_t store_info = insn_info->store_rec;
- deleted = true;
-
- while (store_info)
- {
- if (store_info->alias_set)
- {
- int index = get_bitmap_index (clear_alias_group,
- store_info->alias_set);
- if (index == 0 || !bitmap_bit_p (v, index))
- {
- deleted = false;
- break;
- }
- }
- else
- deleted = false;
- store_info = store_info->next;
- }
- if (deleted && dbg_cnt (dse)
- && check_for_inc_dec_1 (insn_info))
- {
- if (dump_file)
- fprintf (dump_file, "Spill deleting insn %d\n",
- INSN_UID (insn_info->insn));
- delete_insn (insn_info->insn);
- spill_deleted++;
- insn_info->insn = NULL;
- }
- }
-
- if (insn_info->insn
- && INSN_P (insn_info->insn)
- && (!deleted))
- {
- scan_stores_spill (insn_info->store_rec, v, NULL);
- scan_reads_spill (insn_info->read_rec, v, NULL);
- }
-
- insn_info = insn_info->prev_insn;
- }
- }
-}
-
-
\f
/*----------------------------------------------------------------------------
Sixth step.
{
basic_block bb;
- FOR_ALL_BB (bb)
+ FOR_ALL_BB_FN (bb, cfun)
{
bb_info_t bb_info = bb_table[bb->index];
insn_info_t insn_info = bb_info->last_insn;
&& INSN_P (s_info->redundant_reason->insn))
{
rtx rinsn = s_info->redundant_reason->insn;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Locally deleting insn %d "
"because insn %d stores the "
"same value and couldn't be "
bitmap_obstack_release (&dse_bitmap_obstack);
obstack_free (&dse_obstack, NULL);
- if (clear_alias_sets)
- {
- BITMAP_FREE (clear_alias_sets);
- BITMAP_FREE (disqualified_clear_alias_sets);
- free_alloc_pool (clear_alias_mode_pool);
- htab_delete (clear_alias_mode_table);
- }
-
end_alias_analysis ();
free (bb_table);
rtx_group_table.dispose ();
- VEC_free (group_info_t, heap, rtx_group_vec);
+ rtx_group_vec.release ();
BITMAP_FREE (all_blocks);
BITMAP_FREE (scratch);
static unsigned int
rest_of_handle_dse (void)
{
- bool did_global = false;
-
df_set_flags (DF_DEFER_INSN_RESCAN);
/* Need the notes since we must track live hardregs in the forwards
{
df_set_flags (DF_LR_RUN_DCE);
df_analyze ();
- did_global = true;
- if (dump_file)
+ if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "doing global processing\n");
dse_step3 (false);
dse_step4 ();
dse_step5_nospill ();
}
- /* For the instance of dse that runs after reload, we make a special
- pass to process the spills. These are special in that they are
- totally transparent, i.e, there is no aliasing issues that need
- to be considered. This means that the wild reads that kill
- everything else do not apply here. */
- if (clear_alias_sets && dse_step2_spill ())
- {
- if (!did_global)
- {
- df_set_flags (DF_LR_RUN_DCE);
- df_analyze ();
- }
- did_global = true;
- if (dump_file)
- fprintf (dump_file, "doing global spill processing\n");
- dse_step3 (true);
- dse_step4 ();
- dse_step5_spill ();
- }
-
dse_step6 ();
dse_step7 ();
&& dbg_cnt (dse2);
}
-struct rtl_opt_pass pass_rtl_dse1 =
-{
- {
- RTL_PASS,
- "dse1", /* name */
- gate_dse1, /* gate */
- rest_of_handle_dse, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_DSE1, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_df_finish | TODO_verify_rtl_sharing |
- TODO_ggc_collect /* todo_flags_finish */
- }
+namespace {
+
+/* Pass descriptor for the first (pre-reload) DSE pass; gate/execute are
+   delegated to the legacy gate_dse1/rest_of_handle_dse functions.  */
+const pass_data pass_data_rtl_dse1 =
+{
+  RTL_PASS, /* type */
+  "dse1", /* name */
+  OPTGROUP_NONE, /* optinfo_flags */
+  true, /* has_gate */
+  true, /* has_execute */
+  TV_DSE1, /* tv_id */
+  0, /* properties_required */
+  0, /* properties_provided */
+  0, /* properties_destroyed */
+  0, /* todo_flags_start */
+  ( TODO_df_finish | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};
-struct rtl_opt_pass pass_rtl_dse2 =
-{
-  {
-    RTL_PASS,
-    "dse2", /* name */
-    gate_dse2, /* gate */
-    rest_of_handle_dse, /* execute */
-    NULL, /* sub */
-    NULL, /* next */
-    0, /* static_pass_number */
-    TV_DSE2, /* tv_id */
-    0, /* properties_required */
-    0, /* properties_provided */
-    0, /* properties_destroyed */
-    0, /* todo_flags_start */
-    TODO_df_finish | TODO_verify_rtl_sharing |
-    TODO_ggc_collect /* todo_flags_finish */
-  }
+class pass_rtl_dse1 : public rtl_opt_pass
+{
+public:
+  pass_rtl_dse1 (gcc::context *ctxt)
+    : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
+  {}
+
+  /* opt_pass methods: */
+  bool gate () { return gate_dse1 (); }
+  unsigned int execute () { return rest_of_handle_dse (); }
+
+}; // class pass_rtl_dse1
+
+} // anon namespace
+
+/* Factory used by the pass manager (passes.def) to instantiate dse1.  */
+rtl_opt_pass *
+make_pass_rtl_dse1 (gcc::context *ctxt)
+{
+  return new pass_rtl_dse1 (ctxt);
+}
+
+namespace {
+
+/* Pass descriptor for the second (post-reload) DSE pass.  */
+const pass_data pass_data_rtl_dse2 =
+{
+  RTL_PASS, /* type */
+  "dse2", /* name */
+  OPTGROUP_NONE, /* optinfo_flags */
+  true, /* has_gate */
+  true, /* has_execute */
+  TV_DSE2, /* tv_id */
+  0, /* properties_required */
+  0, /* properties_provided */
+  0, /* properties_destroyed */
+  0, /* todo_flags_start */
+  ( TODO_df_finish | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};
+
+namespace {
+
+const pass_data pass_data_rtl_dse2 =
+{
+ RTL_PASS, /* type */
+ "dse2", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ true, /* has_gate */
+ true, /* has_execute */
+ TV_DSE2, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ ( TODO_df_finish | TODO_verify_rtl_sharing ), /* todo_flags_finish */
};
+
+class pass_rtl_dse2 : public rtl_opt_pass
+{
+public:
+  pass_rtl_dse2 (gcc::context *ctxt)
+    : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
+  {}
+
+  /* opt_pass methods: */
+  bool gate () { return gate_dse2 (); }
+  unsigned int execute () { return rest_of_handle_dse (); }
+
+}; // class pass_rtl_dse2
+
+} // anon namespace
+
+/* Factory used by the pass manager (passes.def) to instantiate dse2.  */
+rtl_opt_pass *
+make_pass_rtl_dse2 (gcc::context *ctxt)
+{
+  return new pass_rtl_dse2 (ctxt);
+}