+2012-11-01 Lawrence Crowl <crowl@google.com>
+
+ * sbitmap.h (TEST_BIT): Rename bitmap_bit_p, normalizing parameter
+ type. Update callers to match.
+ (SET_BIT): Rename bitmap_set_bit, normalizing parameter type. Update
+ callers to match.
+ (SET_BIT_WITH_POPCOUNT): Rename bitmap_set_bit_with_popcount,
+ normalizing parameter type. Update callers to match.
+ (RESET_BIT): Rename bitmap_clear_bit, normalizing parameter type.
+ Update callers to match.
+ (RESET_BIT_WITH_POPCOUNT): Rename bitmap_clear_bit_with_popcount,
+ normalizing parameter type. Update callers to match.
+ * basic-block.h (sbitmap_intersection_of_succs): Rename
+ bitmap_intersection_of_succs. Update callers to match.
+ * basic-block.h (sbitmap_intersection_of_preds): Rename
+ bitmap_intersection_of_preds. Update callers to match.
+ * basic-block.h (sbitmap_union_of_succs): Rename
+ bitmap_union_of_succs. Update callers to match.
+ * basic-block.h (sbitmap_union_of_preds): Rename
+ bitmap_union_of_preds. Update callers to match.
+
2012-11-01 Vladimir Makarov <vmakarov@redhat.com>
PR middle-end/55150
{
while (--n >= 0)
{
- SET_BIT (reg_seen, regno + n);
+ bitmap_set_bit (reg_seen, regno + n);
new_reg_base_value[regno + n] = 0;
}
return;
else
{
/* There's a REG_NOALIAS note against DEST. */
- if (TEST_BIT (reg_seen, regno))
+ if (bitmap_bit_p (reg_seen, regno))
{
new_reg_base_value[regno] = 0;
return;
}
- SET_BIT (reg_seen, regno);
+ bitmap_set_bit (reg_seen, regno);
new_reg_base_value[regno] = unique_base_value (unique_id++);
return;
}
}
/* If this is the first set of a register, record the value. */
else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
- && ! TEST_BIT (reg_seen, regno) && new_reg_base_value[regno] == 0)
+ && ! bitmap_bit_p (reg_seen, regno) && new_reg_base_value[regno] == 0)
new_reg_base_value[regno] = find_base_value (src);
- SET_BIT (reg_seen, regno);
+ bitmap_set_bit (reg_seen, regno);
}
/* Return REG_BASE_VALUE for REGNO. Selective scheduler uses this to avoid
{
regno -= FIRST_PSEUDO_REGISTER;
if (regno < VEC_length (rtx, reg_known_value))
- return TEST_BIT (reg_known_equiv_p, regno);
+ return bitmap_bit_p (reg_known_equiv_p, regno);
}
return false;
}
if (regno < VEC_length (rtx, reg_known_value))
{
if (val)
- SET_BIT (reg_known_equiv_p, regno);
+ bitmap_set_bit (reg_known_equiv_p, regno);
else
- RESET_BIT (reg_known_equiv_p, regno);
+ bitmap_clear_bit (reg_known_equiv_p, regno);
}
}
}
#define CLEANUP_CFG_CHANGED 64 /* The caller changed the CFG. */
/* In cfganal.c */
-extern void sbitmap_intersection_of_succs (sbitmap, sbitmap *, basic_block);
-extern void sbitmap_intersection_of_preds (sbitmap, sbitmap *, basic_block);
-extern void sbitmap_union_of_succs (sbitmap, sbitmap *, basic_block);
-extern void sbitmap_union_of_preds (sbitmap, sbitmap *, basic_block);
+extern void bitmap_intersection_of_succs (sbitmap, sbitmap *, basic_block);
+extern void bitmap_intersection_of_preds (sbitmap, sbitmap *, basic_block);
+extern void bitmap_union_of_succs (sbitmap, sbitmap *, basic_block);
+extern void bitmap_union_of_preds (sbitmap, sbitmap *, basic_block);
/* In lcm.c */
extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,
SET_HARD_REG_BIT (info.btrs_live_in_block, regno);
bitmap_and_compl (bb_gen[i], bb_gen[i],
btr_defset[regno - first_btr]);
- SET_BIT (bb_gen[i], insn_uid);
+ bitmap_set_bit (bb_gen[i], insn_uid);
def->next_this_bb = defs_this_bb;
defs_this_bb = def;
- SET_BIT (btr_defset[regno - first_btr], insn_uid);
+ bitmap_set_bit (btr_defset[regno - first_btr], insn_uid);
note_other_use_this_block (regno, info.users_this_bb);
}
/* Check for the blockage emitted by expand_nl_goto_receiver. */
changed = 0;
for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
- sbitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK (i));
+ bitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK (i));
changed |= bitmap_ior_and_compl (bb_out[i], bb_gen[i],
bb_in, bb_kill[i]);
}
rtx insn;
rtx last;
- sbitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK (i));
+ bitmap_union_of_preds (reaching_defs, bb_out, BASIC_BLOCK (i));
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
insn != last;
insn = NEXT_INSN (insn))
for this one. */
bitmap_and_compl (reaching_defs, reaching_defs,
btr_defset[def->btr - first_btr]);
- SET_BIT(reaching_defs, insn_uid);
+ bitmap_set_bit(reaching_defs, insn_uid);
}
if (user != NULL)
for (k = 0; k < size; k++)
if (i + k < SBITMAP_SIZE (stored_args_map)
- && TEST_BIT (stored_args_map, i + k))
+ && bitmap_bit_p (stored_args_map, i + k))
return true;
}
#endif
for (high = low + arg->locate.size.constant; low < high; low++)
- SET_BIT (stored_args_map, low);
+ bitmap_set_bit (stored_args_map, low);
}
return insn != NULL_RTX;
}
return make_edge (src, dst, flags);
/* Does the requested edge already exist? */
- if (! TEST_BIT (edge_cache, dst->index))
+ if (! bitmap_bit_p (edge_cache, dst->index))
{
/* The edge does not exist. Create one and update the
cache. */
- SET_BIT (edge_cache, dst->index);
+ bitmap_set_bit (edge_cache, dst->index);
return unchecked_make_edge (src, dst, flags);
}
ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
/* Check if the edge destination has been visited yet. */
- if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
+ if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
{
/* Mark that we have visited the destination. */
- SET_BIT (visited, dest->index);
+ bitmap_set_bit (visited, dest->index);
pre[dest->index] = prenum++;
if (EDGE_COUNT (dest->succs) > 0)
dest = ei_edge (ei)->dest;
/* Check if the edge destination has been visited yet. */
- if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
+ if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
{
/* Mark that we have visited the destination. */
- SET_BIT (visited, dest->index);
+ bitmap_set_bit (visited, dest->index);
if (EDGE_COUNT (dest->succs) > 0)
/* Since the DEST node has been visited for the first
{
next_bb = b->next_bb;
- if (!(TEST_BIT (visited, b->index)))
+ if (!(bitmap_bit_p (visited, b->index)))
delete_basic_block (b);
}
if (EDGE_COUNT (bb->preds) > 0)
{
stack[sp++] = ei_start (bb->preds);
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
}
}
pred = ei_edge (ei)->src;
/* Check if the predecessor has been visited yet. */
- if (! TEST_BIT (visited, pred->index))
+ if (! bitmap_bit_p (visited, pred->index))
{
/* Mark that we have visited the destination. */
- SET_BIT (visited, pred->index);
+ bitmap_set_bit (visited, pred->index);
if (EDGE_COUNT (pred->preds) > 0)
/* Since the predecessor node has been visited for the first
Note that this doesn't check EXIT_BLOCK itself
since EXIT_BLOCK is always added after the outer do-while loop. */
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
- if (!TEST_BIT (visited, bb->index))
+ if (!bitmap_bit_p (visited, bb->index))
{
has_unvisited_bb = true;
/* Find an already visited predecessor. */
FOR_EACH_EDGE (e, ei, bb->preds)
{
- if (TEST_BIT (visited, e->src->index))
+ if (bitmap_bit_p (visited, e->src->index))
visited_pred = e->src;
}
{
basic_block be = dfs_find_deadend (bb);
gcc_assert (be != NULL);
- SET_BIT (visited, be->index);
+ bitmap_set_bit (visited, be->index);
stack[sp++] = ei_start (be->preds);
break;
}
Find a dead-end from the ENTRY, and restart the iteration. */
basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
gcc_assert (be != NULL);
- SET_BIT (visited, be->index);
+ bitmap_set_bit (visited, be->index);
stack[sp++] = ei_start (be->preds);
}
dest = ei_edge (ei)->dest;
/* Check if the edge destination has been visited yet. */
- if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
+ if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
{
/* Mark that we have visited the destination. */
- SET_BIT (visited, dest->index);
+ bitmap_set_bit (visited, dest->index);
if (pre_order)
pre_order[pre_order_num] = dest->index;
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
{
data->stack[data->sp++] = bb;
- SET_BIT (data->visited_blocks, bb->index);
+ bitmap_set_bit (data->visited_blocks, bb->index);
}
/* Continue the depth-first search through the reverse graph starting with the
/* Perform depth-first search on adjacent vertices. */
FOR_EACH_EDGE (e, ei, bb->preds)
- if (!TEST_BIT (data->visited_blocks, e->src->index))
+ if (!bitmap_bit_p (data->visited_blocks, e->src->index))
flow_dfs_compute_reverse_add_bb (data, e->src);
}
/* Determine if there are unvisited basic blocks. */
FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
- if (!TEST_BIT (data->visited_blocks, bb->index))
+ if (!bitmap_bit_p (data->visited_blocks, bb->index))
return dfs_find_deadend (bb);
return NULL;
static sbitmap visited;
static unsigned v_size;
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
-#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
+#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
+#define UNMARK_VISITED(BB) (bitmap_clear_bit (visited, (BB)->index))
+#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
/* Resize the VISITED sbitmap if necessary. */
size = last_basic_block;
basic block B. */
void
-sbitmap_intersection_of_succs (sbitmap dst, sbitmap *src,
- basic_block b)
+bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
basic block B. */
void
-sbitmap_intersection_of_preds (sbitmap dst, sbitmap *src,
- basic_block b)
+bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
basic block B. */
void
-sbitmap_union_of_succs (sbitmap dst, sbitmap *src,
- basic_block b)
+bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
basic block B. */
void
-sbitmap_union_of_preds (sbitmap dst, sbitmap *src,
- basic_block b)
+bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
unsigned int set_size = dst->size;
edge e;
{
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR)
- SET_BIT (edge_cache, e->dest->index);
+ bitmap_set_bit (edge_cache, e->dest->index);
}
}
FOR_EACH_BB (bb)
SET_STATE (bb,
- TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
+ bitmap_bit_p (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
FOR_EACH_BB (bb)
if (STATE (bb) == BLOCK_TO_SPLIT)
&& dominated_by_p (CDI_DOMINATORS, latch, header))
{
/* Shared headers should be eliminated by now. */
- SET_BIT (headers, header->index);
+ bitmap_set_bit (headers, header->index);
num_loops++;
}
}
/* Search the nodes of the CFG in reverse completion order
so that we can find outer loops first. */
- if (!TEST_BIT (headers, rc_order[b]))
+ if (!bitmap_bit_p (headers, rc_order[b]))
continue;
header = BASIC_BLOCK (rc_order[b]);
bb = bbs[j];
/* Ignore this block if it is in an inner loop. */
- if (TEST_BIT (visited, bb->index))
+ if (bitmap_bit_p (visited, bb->index))
continue;
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
if (bb->loop_father != loop)
{
{
edge_iterator ei;
if (bb->flags & BB_IRREDUCIBLE_LOOP)
- SET_BIT (irreds, bb->index);
+ bitmap_set_bit (irreds, bb->index);
else
- RESET_BIT (irreds, bb->index);
+ bitmap_clear_bit (irreds, bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
e->flags |= EDGE_ALL_FLAGS + 1;
edge_iterator ei;
if ((bb->flags & BB_IRREDUCIBLE_LOOP)
- && !TEST_BIT (irreds, bb->index))
+ && !bitmap_bit_p (irreds, bb->index))
{
error ("basic block %d should be marked irreducible", bb->index);
err = 1;
}
else if (!(bb->flags & BB_IRREDUCIBLE_LOOP)
- && TEST_BIT (irreds, bb->index))
+ && bitmap_bit_p (irreds, bb->index))
{
error ("basic block %d should not be marked irreducible", bb->index);
err = 1;
in_queue = sbitmap_alloc (last_basic_block);
bitmap_clear (in_queue);
- SET_BIT (in_queue, from->index);
+ bitmap_set_bit (in_queue, from->index);
/* Prevent us from going out of the base_loop. */
- SET_BIT (in_queue, base_loop->header->index);
+ bitmap_set_bit (in_queue, base_loop->header->index);
queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
qtop = queue + base_loop->num_nodes + 1;
qbeg++;
if (qbeg == qtop)
qbeg = queue;
- RESET_BIT (in_queue, from->index);
+ bitmap_clear_bit (in_queue, from->index);
if (from->loop_father->header == from)
{
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
*irred_invalidated = true;
- if (TEST_BIT (in_queue, pred->index))
+ if (bitmap_bit_p (in_queue, pred->index))
continue;
/* If it is subloop, then it either was not moved, or
continue;
}
- if (TEST_BIT (in_queue, pred->index))
+ if (bitmap_bit_p (in_queue, pred->index))
continue;
/* Schedule the basic block. */
qend++;
if (qend == qtop)
qend = queue;
- SET_BIT (in_queue, pred->index);
+ bitmap_set_bit (in_queue, pred->index);
}
}
free (in_queue);
/* Find "border" hexes -- i.e. those with predecessor in removed path. */
for (i = 0; i < nrem; i++)
- SET_BIT (seen, rem_bbs[i]->index);
+ bitmap_set_bit (seen, rem_bbs[i]->index);
if (!irred_invalidated)
FOR_EACH_EDGE (ae, ei, e->src->succs)
- if (ae != e && ae->dest != EXIT_BLOCK_PTR && !TEST_BIT (seen, ae->dest->index)
+ if (ae != e && ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index)
&& ae->flags & EDGE_IRREDUCIBLE_LOOP)
irred_invalidated = true;
for (i = 0; i < nrem; i++)
{
bb = rem_bbs[i];
FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
- if (ae->dest != EXIT_BLOCK_PTR && !TEST_BIT (seen, ae->dest->index))
+ if (ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index))
{
- SET_BIT (seen, ae->dest->index);
+ bitmap_set_bit (seen, ae->dest->index);
bord_bbs[n_bord_bbs++] = ae->dest;
if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
basic_block ldom;
bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
- if (TEST_BIT (seen, bb->index))
+ if (bitmap_bit_p (seen, bb->index))
continue;
- SET_BIT (seen, bb->index);
+ bitmap_set_bit (seen, bb->index);
for (ldom = first_dom_son (CDI_DOMINATORS, bb);
ldom;
body = get_loop_body (loop);
for (i = 0; i < loop->num_nodes; i++)
- SET_BIT (seen, body[i]->index);
+ bitmap_set_bit (seen, body[i]->index);
for (i = 0; i < loop->num_nodes; i++)
{
for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
ldom;
ldom = next_dom_son (CDI_DOMINATORS, ldom))
- if (!TEST_BIT (seen, ldom->index))
+ if (!bitmap_bit_p (seen, ldom->index))
{
- SET_BIT (seen, ldom->index);
+ bitmap_set_bit (seen, ldom->index);
VEC_safe_push (basic_block, heap, dom_bbs, ldom);
}
}
scale_step = XNEWVEC (int, ndupl);
for (i = 1; i <= ndupl; i++)
- scale_step[i - 1] = TEST_BIT (wont_exit, i)
+ scale_step[i - 1] = bitmap_bit_p (wont_exit, i)
? prob_pass_wont_exit
: prob_pass_thru;
}
else if (is_latch)
{
- prob_pass_main = TEST_BIT (wont_exit, 0)
+ prob_pass_main = bitmap_bit_p (wont_exit, 0)
? prob_pass_wont_exit
: prob_pass_thru;
p = prob_pass_main;
}
/* Record exit edge in this copy. */
- if (orig && TEST_BIT (wont_exit, j + 1))
+ if (orig && bitmap_bit_p (wont_exit, j + 1))
{
if (to_remove)
VEC_safe_push (edge, heap, *to_remove, new_spec_edges[SE_ORIG]);
free (orig_loops);
/* Record the exit edge in the original loop body, and update the frequencies. */
- if (orig && TEST_BIT (wont_exit, 0))
+ if (orig && bitmap_bit_p (wont_exit, 0))
{
if (to_remove)
VEC_safe_push (edge, heap, *to_remove, orig);
if (bb->flags & BB_SUPERBLOCK)
{
bb->flags &= ~BB_SUPERBLOCK;
- SET_BIT (superblocks, bb->index);
+ bitmap_set_bit (superblocks, bb->index);
need = true;
}
if (! blocks)
check_last_block = true;
else
- check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
+ check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
/* In the last basic block, before epilogue generation, there will be
a fallthru edge to EXIT. Special care is required if the last insn
if (!bb)
continue;
- if (blocks && !TEST_BIT (blocks, i))
+ if (blocks && !bitmap_bit_p (blocks, i))
continue;
for (insn = BB_END (bb); ; insn = prev_insn)
selected_mode = (enum attr_fp_mode) epiphany_normal_fp_rounding;
VEC_quick_push (basic_block, todo, bb);
- SET_BIT (pushed, bb->index);
+ bitmap_set_bit (pushed, bb->index);
}
XVECEXP (XEXP (src, 0), 0, 0) = GEN_INT (selected_mode);
SET_SRC (XVECEXP (PATTERN (insn), 0, 1)) = copy_rtx (src);
edge e;
edge_iterator ei;
- SET_BIT (pushed, bb->index);
- SET_BIT (pushed, bb->index);
+ bitmap_set_bit (pushed, bb->index);
+ bitmap_set_bit (pushed, bb->index);
if (epiphany_normal_fp_rounding == FP_MODE_ROUND_NEAREST)
{
continue;
if (REGNO_REG_SET_P (DF_LIVE_IN (succ), selected_reg))
{
- if (TEST_BIT (pushed, succ->index))
+ if (bitmap_bit_p (pushed, succ->index))
continue;
VEC_quick_push (basic_block, todo, succ);
- SET_BIT (pushed, bb->index);
+ bitmap_set_bit (pushed, bb->index);
continue;
}
start_sequence ();
bitmap_ones (in_pending);
FOR_EACH_BB (bb)
if (BLOCK_INFO (bb)->processed)
- RESET_BIT (in_pending, bb->index);
+ bitmap_clear_bit (in_pending, bb->index);
else
{
move_or_delete_vzeroupper_1 (bb, false);
while (!fibheap_empty (worklist))
{
bb = (basic_block) fibheap_extract_min (worklist);
- RESET_BIT (in_worklist, bb->index);
- gcc_assert (!TEST_BIT (visited, bb->index));
- if (!TEST_BIT (visited, bb->index))
+ bitmap_clear_bit (in_worklist, bb->index);
+ gcc_assert (!bitmap_bit_p (visited, bb->index));
+ if (!bitmap_bit_p (visited, bb->index))
{
edge_iterator ei;
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
if (move_or_delete_vzeroupper_1 (bb, false))
FOR_EACH_EDGE (e, ei, bb->succs)
|| BLOCK_INFO (e->dest)->processed)
continue;
- if (TEST_BIT (visited, e->dest->index))
+ if (bitmap_bit_p (visited, e->dest->index))
{
- if (!TEST_BIT (in_pending, e->dest->index))
+ if (!bitmap_bit_p (in_pending, e->dest->index))
{
/* Send E->DEST to next round. */
- SET_BIT (in_pending, e->dest->index);
+ bitmap_set_bit (in_pending, e->dest->index);
fibheap_insert (pending,
bb_order[e->dest->index],
e->dest);
}
}
- else if (!TEST_BIT (in_worklist, e->dest->index))
+ else if (!bitmap_bit_p (in_worklist, e->dest->index))
{
/* Add E->DEST to current round. */
- SET_BIT (in_worklist, e->dest->index);
+ bitmap_set_bit (in_worklist, e->dest->index);
fibheap_insert (worklist, bb_order[e->dest->index],
e->dest);
}
ready_try[n_ready] = 1;
if (data->ready_try_change)
- SET_BIT (data->ready_try_change, n_ready);
+ bitmap_set_bit (data->ready_try_change, n_ready);
}
}
}
FOR_EACH_EDGE (e, ei, bb->preds)
if (!single_succ_p (e->src)
- || !TEST_BIT (protected_bbs, e->src->index)
+ || !bitmap_bit_p (protected_bbs, e->src->index)
|| (e->flags & EDGE_COMPLEX) != 0)
return false;
return true;
/* Record whether the end of this block is protected. */
if (unprotected_region == NULL_RTX)
- SET_BIT (protected_bbs, bb->index);
+ bitmap_set_bit (protected_bbs, bb->index);
}
XDELETEVEC (rev_post_order);
LABEL_PRESERVE_P (branch_label) = 1;
insn = emit_label_before (branch_label, branch);
branch_label = gen_rtx_LABEL_REF (VOIDmode, branch_label);
- SET_BIT (blocks, BLOCK_FOR_INSN (branch)->index);
+ bitmap_set_bit (blocks, BLOCK_FOR_INSN (branch)->index);
hint = emit_insn_before (gen_hbr (branch_label, target), before);
recog_memoized (hint);
is killed in the block where the definition is. */
for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest));
def; def = DF_REF_NEXT_REG (def))
- SET_BIT (kill[DF_REF_BB (def)->index], indx);
+ bitmap_set_bit (kill[DF_REF_BB (def)->index], indx);
/* If the source is a pseudo-reg, for each definition of the source,
the expression is killed in the block where the definition is. */
if (REG_P (expr->src))
for (def = DF_REG_DEF_CHAIN (REGNO (expr->src));
def; def = DF_REF_NEXT_REG (def))
- SET_BIT (kill[DF_REF_BB (def)->index], indx);
+ bitmap_set_bit (kill[DF_REF_BB (def)->index], indx);
/* The occurrences recorded in avail_occr are exactly those that
are locally available in the block where they are. */
for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
{
- SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
+ bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
}
}
}
{
int index = implicit_set_indexes[bb->index];
if (index != -1)
- SET_BIT (cprop_avin[bb->index], index);
+ bitmap_set_bit (cprop_avin[bb->index], index);
}
}
\f
which contains INSN. */
while (set)
{
- if (TEST_BIT (cprop_avin[BLOCK_FOR_INSN (insn)->index],
+ if (bitmap_bit_p (cprop_avin[BLOCK_FOR_INSN (insn)->index],
set->bitmap_index))
break;
set = next_set (regno, set);
while (set)
{
- if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
+ if (bitmap_bit_p (cprop_avout[bb], set->bitmap_index))
break;
set = next_set (regno, set);
}
edge e;
int path_size;
- SET_BIT (cse_visited_basic_blocks, first_bb->index);
+ bitmap_set_bit (cse_visited_basic_blocks, first_bb->index);
/* See if there is a previous path. */
path_size = data->path_size;
We still want to visit each basic block only once, so
halt the path here if we have already visited BB. */
- && !TEST_BIT (cse_visited_basic_blocks, bb->index))
+ && !bitmap_bit_p (cse_visited_basic_blocks, bb->index))
{
- SET_BIT (cse_visited_basic_blocks, bb->index);
+ bitmap_set_bit (cse_visited_basic_blocks, bb->index);
data->path[path_size++].bb = bb;
break;
}
&& single_pred_p (e->dest)
/* Avoid visiting basic blocks twice. The large comment
above explains why this can happen. */
- && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
+ && !bitmap_bit_p (cse_visited_basic_blocks, e->dest->index))
{
basic_block bb2 = e->dest;
- SET_BIT (cse_visited_basic_blocks, bb2->index);
+ bitmap_set_bit (cse_visited_basic_blocks, bb2->index);
data->path[path_size++].bb = bb2;
bb = bb2;
}
/* If we truncate the path, we must also reset the
visited bit on the remaining blocks in the path,
or we will never visit them at all. */
- RESET_BIT (cse_visited_basic_blocks,
+ bitmap_clear_bit (cse_visited_basic_blocks,
ebb_data->path[path_size].bb->index);
ebb_data->path[path_size].bb = NULL;
}
{
bb = BASIC_BLOCK (rc_order[i++]);
}
- while (TEST_BIT (cse_visited_basic_blocks, bb->index)
+ while (bitmap_bit_p (cse_visited_basic_blocks, bb->index)
&& i < n_blocks);
/* Find all paths starting with BB, and process them. */
/* Artificial defs are always needed and they do not have an insn.
We should never see them here. */
gcc_assert (insn);
- return TEST_BIT (marked, INSN_UID (insn));
+ return bitmap_bit_p (marked, INSN_UID (insn));
}
{
if (!fast)
VEC_safe_push (rtx, heap, worklist, insn);
- SET_BIT (marked, INSN_UID (insn));
+ bitmap_set_bit (marked, INSN_UID (insn));
if (dump_file)
fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
if (CALL_P (insn)
{
/* Don't bother calculating inter-loop dep if an intra-loop dep
already exists. */
- if (! TEST_BIT (dest_node->successors, j))
+ if (! bitmap_bit_p (dest_node->successors, j))
add_inter_loop_mem_dep (g, dest_node, j_node);
/* If -fmodulo-sched-allow-regmoves
is set certain anti-dep edges are not created.
memory dependencies. Thus we add intra edges between
every two memory instructions in this case. */
if (flag_modulo_sched_allow_regmoves
- && !TEST_BIT (dest_node->predecessors, j))
+ && !bitmap_bit_p (dest_node->predecessors, j))
add_intra_loop_mem_dep (g, j_node, dest_node);
}
}
/* Should have allocated the sbitmaps. */
gcc_assert (src->successors && dest->predecessors);
- SET_BIT (src->successors, dest->cuid);
- SET_BIT (dest->predecessors, src->cuid);
+ bitmap_set_bit (src->successors, dest->cuid);
+ bitmap_set_bit (dest->predecessors, src->cuid);
e->next_in = dest->in;
dest->in = e;
e->next_out = src->out;
ddg_node_ptr n = &g->nodes[u];
for (e = n->out; e; e = e->next_out)
- if (TEST_BIT (nodes, e->dest->cuid))
+ if (bitmap_bit_p (nodes, e->dest->cuid))
{
e->aux.count = IN_SCC;
if (e->distance > 0)
bitmap_clear (scc_nodes);
bitmap_clear (from);
bitmap_clear (to);
- SET_BIT (from, dest->cuid);
- SET_BIT (to, src->cuid);
+ bitmap_set_bit (from, dest->cuid);
+ bitmap_set_bit (to, src->cuid);
if (find_nodes_on_paths (scc_nodes, g, from, to))
{
ddg_node_ptr v_node = e->dest;
int v = v_node->cuid;
- if (!TEST_BIT (reachable_from, v))
+ if (!bitmap_bit_p (reachable_from, v))
{
- SET_BIT (reachable_from, v);
- SET_BIT (tmp, v);
+ bitmap_set_bit (reachable_from, v);
+ bitmap_set_bit (tmp, v);
change = 1;
}
}
ddg_node_ptr v_node = e->src;
int v = v_node->cuid;
- if (!TEST_BIT (reach_to, v))
+ if (!bitmap_bit_p (reach_to, v))
{
- SET_BIT (reach_to, v);
- SET_BIT (tmp, v);
+ bitmap_set_bit (reach_to, v);
+ bitmap_set_bit (tmp, v);
change = 1;
}
}
ddg_node_ptr v_node = e->dest;
int v = v_node->cuid;
- if (TEST_BIT (nodes, v)
+ if (bitmap_bit_p (nodes, v)
&& (e->distance == 0)
&& (v_node->aux.count < u_node->aux.count + e->latency))
{
v_node->aux.count = u_node->aux.count + e->latency;
- SET_BIT (tmp, v);
+ bitmap_set_bit (tmp, v);
result = 1;
}
}
g->nodes[src].aux.count = 0;
bitmap_clear (tmp);
- SET_BIT (tmp, src);
+ bitmap_set_bit (tmp, src);
while (change)
{
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (age <= BB_LAST_CHANGE_AGE (e->src)
- && TEST_BIT (considered, e->src->index))
+ && bitmap_bit_p (considered, e->src->index))
changed |= dataflow->problem->con_fun_n (e);
}
else if (dataflow->problem->con_fun_0)
{
unsigned ob_index = e->dest->index;
- if (TEST_BIT (considered, ob_index))
+ if (bitmap_bit_p (considered, ob_index))
bitmap_set_bit (pending, bbindex_to_postorder[ob_index]);
}
return true;
FOR_EACH_EDGE (e, ei, bb->succs)
{
if (age <= BB_LAST_CHANGE_AGE (e->dest)
- && TEST_BIT (considered, e->dest->index))
+ && bitmap_bit_p (considered, e->dest->index))
changed |= dataflow->problem->con_fun_n (e);
}
else if (dataflow->problem->con_fun_0)
{
unsigned ob_index = e->src->index;
- if (TEST_BIT (considered, ob_index))
+ if (bitmap_bit_p (considered, ob_index))
bitmap_set_bit (pending, bbindex_to_postorder[ob_index]);
}
return true;
bitmap_clear (considered);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_consider, 0, index, bi)
{
- SET_BIT (considered, index);
+ bitmap_set_bit (considered, index);
}
/* Initialize the mapping of block index to postorder. */
int sp = 0;
sbitmap visited = sbitmap_alloc (last_basic_block + 1);
bitmap_clear (visited);
- SET_BIT (visited, ENTRY_BLOCK_PTR->index);
+ bitmap_set_bit (visited, ENTRY_BLOCK_PTR->index);
while (true)
{
if (walk_data->before_dom_children)
(*walk_data->before_dom_children) (walk_data, bb);
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
/* Mark the current BB to be popped out of the recursion stack
once children are processed. */
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (!dominated_by_p (CDI_DOMINATORS, e->src, e->dest)
- && !TEST_BIT (visited, e->src->index))
+ && !bitmap_bit_p (visited, e->src->index))
{
found = false;
break;
edge e;
edge_iterator ei;
- if (TEST_BIT (unreachable_blocks, bb->index))
+ if (bitmap_bit_p (unreachable_blocks, bb->index))
{
- RESET_BIT (unreachable_blocks, bb->index);
+ bitmap_clear_bit (unreachable_blocks, bb->index);
FOR_EACH_EDGE (e, ei, bb->preds)
{
mark_reachable_blocks (unreachable_blocks, e->src);
return;
if (wordindex >= map->wordmask->n_bits
- || !TEST_BIT (map->wordmask, wordindex))
+ || !bitmap_bit_p (map->wordmask, wordindex))
return;
if (map->cache != NULL && map->cacheindex == wordindex)
map->cache = map->cache - 1;
}
- RESET_BIT_WITH_POPCOUNT (map->wordmask, wordindex);
+ bitmap_clear_bit_with_popcount (map->wordmask, wordindex);
memmove(&map->elts[eltwordindex], &map->elts[eltwordindex + 1],
sizeof (EBITMAP_ELT_TYPE) * (map->numwords - eltwordindex));
/* Allocate a new word in the array and move whatever is in it's
place, if necessary. */
- if (!TEST_BIT (map->wordmask, wordindex))
+ if (!bitmap_bit_p (map->wordmask, wordindex))
{
unsigned long count;
unsigned int i;
- SET_BIT_WITH_POPCOUNT (map->wordmask, wordindex);
+ bitmap_set_bit_with_popcount (map->wordmask, wordindex);
count = sbitmap_popcount (map->wordmask, wordindex);
gcc_assert (count <= map->numwords);
it's not set in the wordmask, this bit can't exist in our
ebitmap. */
if (wordindex >= map->wordmask->n_bits
- || !TEST_BIT (map->wordmask, wordindex))
+ || !bitmap_bit_p (map->wordmask, wordindex))
return false;
/* Find the bit and test it. */
*dstplace = tmpword;
}
else
- RESET_BIT_WITH_POPCOUNT (dst->wordmask, i);
+ bitmap_clear_bit_with_popcount (dst->wordmask, i);
}
#ifdef EBITMAP_DEBUGGING
{
{
bool src1hasword, src2hasword;
- src1hasword = TEST_BIT (src1->wordmask, i);
- src2hasword = TEST_BIT (src2->wordmask, i);
+ src1hasword = bitmap_bit_p (src1->wordmask, i);
+ src2hasword = bitmap_bit_p (src2->wordmask, i);
if (src1hasword && src2hasword)
{
*dstplace = tmpword;
}
else
- RESET_BIT_WITH_POPCOUNT (dst->wordmask, i);
+ bitmap_clear_bit_with_popcount (dst->wordmask, i);
}
else if (src1hasword)
src1eltindex++;
bool dsthasword, srchasword;
dsthasword = (i < dst->wordmask->n_bits
- && TEST_BIT (dst->wordmask, i));
+ && bitmap_bit_p (dst->wordmask, i));
srchasword = (i < src->wordmask->n_bits
- && TEST_BIT (src->wordmask, i));
+ && bitmap_bit_p (src->wordmask, i));
if (dsthasword && srchasword)
{
{
newarray [neweltindex++] = ebitmap_array_get (src, srceltindex++);
gcc_assert (i < dst->wordmask->n_bits);
- SET_BIT_WITH_POPCOUNT (dst->wordmask, i);
+ bitmap_set_bit_with_popcount (dst->wordmask, i);
changed |= true;
}
}
bool src1hasword, src2hasword;
EBITMAP_ELT_TYPE tmpword;
src1hasword = (i < src1->wordmask->n_bits
- && TEST_BIT (src1->wordmask, i));
+ && bitmap_bit_p (src1->wordmask, i));
src2hasword = (i < src2->wordmask->n_bits
- && TEST_BIT (src2->wordmask, i));
+ && bitmap_bit_p (src2->wordmask, i));
if (src1hasword && src2hasword)
{
newarray [neweltindex++] = tmpword;
}
- if (i >= dst->wordmask->n_bits || !TEST_BIT (dst->wordmask, i))
+ if (i >= dst->wordmask->n_bits || !bitmap_bit_p (dst->wordmask, i))
{
changed = true;
}
bool srchasword;
srchasword = (i < src->wordmask->n_bits
- && TEST_BIT (src->wordmask, i));
+ && bitmap_bit_p (src->wordmask, i));
if (srchasword)
{
*dstplace = tmpword;
}
else
- RESET_BIT_WITH_POPCOUNT (dst->wordmask, i);
+ bitmap_clear_bit_with_popcount (dst->wordmask, i);
}
else
{
EBITMAP_ELT_TYPE tmpword;
src2hasword = (i < src2->wordmask->n_bits
- && TEST_BIT (src2->wordmask, i));
+ && bitmap_bit_p (src2->wordmask, i));
if (src2hasword)
{
newarray[neweltindex++] = tmpword;
}
else
- RESET_BIT_WITH_POPCOUNT (tempmask, i);
+ bitmap_clear_bit_with_popcount (tempmask, i);
}
else
newarray[neweltindex++] = tmpword;
}
- if (i >= dst->wordmask->n_bits || !TEST_BIT (dst->wordmask, i))
+ if (i >= dst->wordmask->n_bits || !bitmap_bit_p (dst->wordmask, i))
{
changed = true;
}
}
else
{
- if (TEST_BIT (bmp->wordmask, min / EBITMAP_ELT_BITS) == 0)
+ if (bitmap_bit_p (bmp->wordmask, min / EBITMAP_ELT_BITS) == 0)
i->word = 0;
else
{
do
{
- SET_BIT (b_outer, region_b->index);
+ bitmap_set_bit (b_outer, region_b->index);
region_b = region_b->outer;
}
while (region_b);
do
{
- if (TEST_BIT (b_outer, region_a->index))
+ if (bitmap_bit_p (b_outer, region_a->index))
break;
region_a = region_a->outer;
}
/* Look for basic blocks within the prologue insns. */
blocks = sbitmap_alloc (last_basic_block);
bitmap_clear (blocks);
- SET_BIT (blocks, entry_edge->dest->index);
- SET_BIT (blocks, orig_entry_edge->dest->index);
+ bitmap_set_bit (blocks, entry_edge->dest->index);
+ bitmap_set_bit (blocks, orig_entry_edge->dest->index);
find_many_sub_basic_blocks (blocks);
sbitmap_free (blocks);
if (antloc)
for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
{
- SET_BIT (antloc[BLOCK_FOR_INSN (occr->insn)->index], indx);
+ bitmap_set_bit (antloc[BLOCK_FOR_INSN (occr->insn)->index], indx);
/* While we're scanning the table, this is a good place to
initialize this. */
if (comp)
for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
{
- SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
+ bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
/* While we're scanning the table, this is a good place to
initialize this. */
for (def = DF_REG_DEF_CHAIN (REGNO (x));
def;
def = DF_REF_NEXT_REG (def))
- RESET_BIT (bmap[DF_REF_BB (def)->index], indx);
+ bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
}
return;
do any list walking for them. */
EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
{
- RESET_BIT (bmap[bb_index], indx);
+ bitmap_clear_bit (bmap[bb_index], indx);
}
/* Now iterate over the blocks which have memory modifications
if (canon_true_dependence (dest, GET_MODE (dest),
dest_addr, x, NULL_RTX))
- RESET_BIT (bmap[bb_index], indx);
+ bitmap_clear_bit (bmap[bb_index], indx);
}
}
}
/* Note potentially trapping expressions. */
if (may_trap_p (expr->expr))
{
- SET_BIT (prune_exprs, expr->bitmap_index);
+ bitmap_set_bit (prune_exprs, expr->bitmap_index);
continue;
}
analysis to determine if this mem is actually killed
by this call. */
- SET_BIT (prune_exprs, expr->bitmap_index);
+ bitmap_set_bit (prune_exprs, expr->bitmap_index);
}
}
}
for (j = 0; j < (unsigned) n_elems; j++)
if (deletions[j]
&& ((unsigned) insertions[j] / deletions[j]) > MAX_GCSE_INSERTION_RATIO)
- SET_BIT (prune_exprs, j);
+ bitmap_set_bit (prune_exprs, j);
/* Now prune PRE_INSERT_MAP and PRE_DELETE_MAP based on PRUNE_EXPRS. */
EXECUTE_IF_SET_IN_SBITMAP (prune_exprs, 0, j, sbi)
{
for (i = 0; i < (unsigned) n_edges; i++)
- RESET_BIT (pre_insert_map[i], j);
+ bitmap_clear_bit (pre_insert_map[i], j);
for (i = 0; i < (unsigned) last_basic_block; i++)
- RESET_BIT (pre_delete_map[i], j);
+ bitmap_clear_bit (pre_delete_map[i], j);
}
sbitmap_free (prune_exprs);
;/* Nothing to do. */
/* Does this predecessor generate this expression? */
- else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
+ else if (bitmap_bit_p (comp[pred_bb->index], expr->bitmap_index))
{
/* Is this the occurrence we're looking for?
Note that there's only one generating occurrence per block
visited[pred_bb->index] = 1;
}
/* Ignore this predecessor if it kills the expression. */
- else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
+ else if (! bitmap_bit_p (transp[pred_bb->index], expr->bitmap_index))
visited[pred_bb->index] = 1;
/* Neither gen nor kill. */
/* Insert this expression on this edge if it would
reach the deleted occurrence in BB. */
- if (!TEST_BIT (inserted[e], j))
+ if (!bitmap_bit_p (inserted[e], j))
{
rtx insn;
edge eg = INDEX_EDGE (edge_list, e);
}
update_ld_motion_stores (expr);
- SET_BIT (inserted[e], j);
+ bitmap_set_bit (inserted[e], j);
did_insert = 1;
gcse_create_count++;
}
basic_block bb = BLOCK_FOR_INSN (insn);
/* We only delete insns that have a single_set. */
- if (TEST_BIT (pre_delete_map[bb->index], indx)
+ if (bitmap_bit_p (pre_delete_map[bb->index], indx)
&& (set = single_set (insn)) != 0
&& dbg_cnt (pre_insn))
{
{
if (bb->next_bb != EXIT_BLOCK_PTR)
{
- sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
- hoist_vbein, bb);
+ bitmap_intersection_of_succs (hoist_vbeout[bb->index],
+ hoist_vbein, bb);
/* Include expressions in VBEout that are calculated
in BB and available at its end. */
break;
else if (pred_bb == expr_bb)
continue;
- else if (TEST_BIT (visited, pred_bb->index))
+ else if (bitmap_bit_p (visited, pred_bb->index))
continue;
- else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
+ else if (! bitmap_bit_p (transp[pred_bb->index], expr->bitmap_index))
break;
/* Not killed. */
else
{
- SET_BIT (visited, pred_bb->index);
+ bitmap_set_bit (visited, pred_bb->index);
if (! should_hoist_expr_to_dom (expr_bb, expr, pred_bb,
visited, distance, bb_size,
pressure_class, nregs, hoisted_bbs))
block. These are the potentially hoistable expressions. */
for (i = 0; i < SBITMAP_SIZE (hoist_vbeout[bb->index]); i++)
{
- if (TEST_BIT (hoist_vbeout[bb->index], i))
+ if (bitmap_bit_p (hoist_vbeout[bb->index], i))
{
int nregs = 0;
enum reg_class pressure_class = NO_REGS;
/* If an expression is computed in BB and is available at end of
BB, hoist all occurrences dominated by BB to BB. */
- if (TEST_BIT (comp[bb->index], i))
+ if (bitmap_bit_p (comp[bb->index], i))
{
occr = find_occr_in_bb (expr->antic_occr, bb);
/* We've found a dominated block, now see if it computes
the busy expression and whether or not moving that
expression to the "beginning" of that block is safe. */
- if (!TEST_BIT (antloc[dominated->index], i))
+ if (!bitmap_bit_p (antloc[dominated->index], i))
continue;
occr = find_occr_in_bb (expr->antic_occr, dominated);
/* Set bit number bitno in the bit string. The macro is not side
effect proof. */
-#define SET_BIT(bitstring, bitno) \
+#define bitmap_set_bit(bitstring, bitno) \
((bitstring)[(bitno) / (sizeof (*(bitstring)) * CHAR_BIT)] |= \
(HOST_WIDE_INT)1 << (bitno) % (sizeof (*(bitstring)) * CHAR_BIT))
/* Test if bit number bitno in the bitstring is set. The macro is not
side effect proof. */
-#define TEST_BIT(bitstring, bitno) \
+#define bitmap_bit_p(bitstring, bitno) \
((bitstring)[(bitno) / (sizeof (*(bitstring)) * CHAR_BIT)] >> \
(bitno) % (sizeof (*(bitstring)) * CHAR_BIT) & 1)
set_unit_reserv (reserv_sets_t reservs, int cycle_num, int unit_num)
{
gcc_assert (cycle_num < max_cycles_num);
- SET_BIT (reservs, cycle_num * els_in_cycle_reserv
+ bitmap_set_bit (reservs, cycle_num * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num);
}
test_unit_reserv (reserv_sets_t reservs, int cycle_num, int unit_num)
{
gcc_assert (cycle_num < max_cycles_num);
- return TEST_BIT (reservs, cycle_num * els_in_cycle_reserv
+ return bitmap_bit_p (reservs, cycle_num * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num);
}
reserved_units_num = 0;
for (unit_num = 0; unit_num < description->units_num; unit_num++)
- if (TEST_BIT (reservs, start_cycle * els_in_cycle_reserv
+ if (bitmap_bit_p (reservs, start_cycle * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num))
reserved_units_num++;
gcc_assert (repetition_num > 0);
for (unit_num = 0;
unit_num < description->units_num;
unit_num++)
- if (TEST_BIT (reservs, start_cycle * els_in_cycle_reserv
+ if (bitmap_bit_p (reservs, start_cycle * els_in_cycle_reserv
* sizeof (set_el_t) * CHAR_BIT + unit_num))
{
if (reserved_units_num != 0)
el != NULL;
el = el->next_unit_set_el)
{
- SET_BIT (unit_excl_set, el->unit_decl->unit_num);
+ bitmap_set_bit (unit_excl_set, el->unit_decl->unit_num);
el->unit_decl->in_set_p = TRUE;
}
unit_excl_set_table [DECL_UNIT (decl)->unit_num] = unit_excl_set;
curr->next_pattern_reserv = NULL;
for (i = 0; i < el->units_num; i++)
{
- SET_BIT (curr->reserv, el->unit_decls [i]->unit_num);
+ bitmap_set_bit (curr->reserv, el->unit_decls [i]->unit_num);
el->unit_decls [i]->in_set_p = TRUE;
}
if (prev != NULL)
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->preds)
- if (!TEST_BIT (map, e->src->index)
+ if (!bitmap_bit_p (map, e->src->index)
&& !dominated_by_p (CDI_DOMINATORS, e->src, bb))
return false;
VEC (basic_block, heap) *dom;
poly_bb_p pbb;
- if (TEST_BIT (visited, bb->index)
+ if (bitmap_bit_p (visited, bb->index)
|| !bb_in_sese_p (bb, region))
return;
pbb = new_poly_bb (scop, try_generate_gimple_bb (scop, bb));
VEC_safe_push (poly_bb_p, heap, SCOP_BBS (scop), pbb);
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
dom = get_dominated_by (CDI_DOMINATORS, bb);
for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
{
ira_assert (r->start <= r->finish);
- SET_BIT (born, r->start);
- SET_BIT (dead, r->finish);
+ bitmap_set_bit (born, r->start);
+ bitmap_set_bit (dead, r->finish);
}
born_or_dead = sbitmap_alloc (ira_max_point);
prev_born_p = prev_dead_p = false;
EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi)
{
- born_p = TEST_BIT (born, i);
- dead_p = TEST_BIT (dead, i);
+ born_p = bitmap_bit_p (born, i);
+ dead_p = bitmap_bit_p (dead, i);
if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
|| (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
map[i] = n;
while (start < last)
{
- RESET_BIT (live_subregs[regno], start);
+ bitmap_clear_bit (live_subregs[regno], start);
start++;
}
while (start < last)
{
- SET_BIT (live_subregs[regno], start);
+ bitmap_set_bit (live_subregs[regno], start);
start++;
}
}
/* Clear the aux field of this block so that it can be added to
the worklist again if necessary. */
bb->aux = NULL;
- sbitmap_intersection_of_succs (antout[bb->index], antin, bb);
+ bitmap_intersection_of_succs (antout[bb->index], antin, bb);
}
if (bitmap_or_and (antin[bb->index], antloc[bb->index],
/* Clear the aux field of this block so that it can be added to
the worklist again if necessary. */
bb->aux = NULL;
- sbitmap_intersection_of_preds (avin[bb->index], avout, bb);
+ bitmap_intersection_of_preds (avin[bb->index], avout, bb);
}
if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
wont_exit = sbitmap_alloc (npeel + 1);
bitmap_ones (wont_exit);
- RESET_BIT (wont_exit, 0);
+ bitmap_clear_bit (wont_exit, 0);
if (desc->noloop_assumptions)
- RESET_BIT (wont_exit, 1);
+ bitmap_clear_bit (wont_exit, 1);
remove_edges = NULL;
fprintf (dump_file, ";; Condition at beginning of loop.\n");
/* Peel exit_mod iterations. */
- RESET_BIT (wont_exit, 0);
+ bitmap_clear_bit (wont_exit, 0);
if (desc->noloop_assumptions)
- RESET_BIT (wont_exit, 1);
+ bitmap_clear_bit (wont_exit, 1);
if (exit_mod)
{
loop->any_estimate = false;
}
- SET_BIT (wont_exit, 1);
+ bitmap_set_bit (wont_exit, 1);
}
else
{
if (exit_mod != max_unroll
|| desc->noloop_assumptions)
{
- RESET_BIT (wont_exit, 0);
+ bitmap_clear_bit (wont_exit, 0);
if (desc->noloop_assumptions)
- RESET_BIT (wont_exit, 1);
+ bitmap_clear_bit (wont_exit, 1);
opt_info_start_duplication (opt_info);
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
loop->any_estimate = false;
desc->noloop_assumptions = NULL_RTX;
- SET_BIT (wont_exit, 0);
- SET_BIT (wont_exit, 1);
+ bitmap_set_bit (wont_exit, 0);
+ bitmap_set_bit (wont_exit, 1);
}
- RESET_BIT (wont_exit, max_unroll);
+ bitmap_clear_bit (wont_exit, max_unroll);
}
/* Now unroll the loop. */
bitmap_clear (wont_exit);
if (extra_zero_check
&& !desc->noloop_assumptions)
- SET_BIT (wont_exit, 1);
+ bitmap_set_bit (wont_exit, 1);
ezc_swtch = loop_preheader_edge (loop)->src;
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
1, wont_exit, desc->out_edge,
/* Peel the copy. */
bitmap_clear (wont_exit);
if (i != n_peel - 1 || !last_may_exit)
- SET_BIT (wont_exit, 1);
+ bitmap_set_bit (wont_exit, 1);
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
1, wont_exit, desc->out_edge,
&remove_edges,
/* And unroll loop. */
bitmap_ones (wont_exit);
- RESET_BIT (wont_exit, may_exit_copy);
+ bitmap_clear_bit (wont_exit, may_exit_copy);
opt_info_start_duplication (opt_info);
ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
extract_insn (insn);
if (cfi)
- SET_BIT (sub_blocks, bb->index);
+ bitmap_set_bit (sub_blocks, bb->index);
}
}
else
for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
{
lra_assert (r->start <= r->finish);
- SET_BIT (born, r->start);
- SET_BIT (dead, r->finish);
+ bitmap_set_bit (born, r->start);
+ bitmap_set_bit (dead, r->finish);
}
}
born_or_dead = sbitmap_alloc (lra_live_max_point);
prev_born_p = prev_dead_p = false;
EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi)
{
- born_p = TEST_BIT (born, i);
- dead_p = TEST_BIT (dead, i);
+ born_p = bitmap_bit_p (born, i);
+ dead_p = bitmap_bit_p (dead, i);
if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
|| (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
{
if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
lra_constraint_insn_stack_bitmap =
sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
- if (TEST_BIT (lra_constraint_insn_stack_bitmap, uid))
+ if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
return;
- SET_BIT (lra_constraint_insn_stack_bitmap, uid);
+ bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
if (! always_update)
lra_update_insn_regno_info (insn);
VEC_safe_push (rtx, heap, lra_constraint_insn_stack, insn);
lra_pop_insn (void)
{
rtx insn = VEC_pop (rtx, lra_constraint_insn_stack);
- RESET_BIT (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
+ bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
return insn;
}
{
basic_block pb = e->src;
- if (e->aux || ! TEST_BIT (transp[pb->index], j))
+ if (e->aux || ! bitmap_bit_p (transp[pb->index], j))
continue;
- RESET_BIT (transp[pb->index], j);
+ bitmap_clear_bit (transp[pb->index], j);
make_preds_opaque (pb, j);
}
}
{
ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
add_seginfo (info + bb->index, ptr);
- RESET_BIT (transp[bb->index], j);
+ bitmap_clear_bit (transp[bb->index], j);
}
}
last_mode = mode;
ptr = new_seginfo (mode, insn, bb->index, live_now);
add_seginfo (info + bb->index, ptr);
- RESET_BIT (transp[bb->index], j);
+ bitmap_clear_bit (transp[bb->index], j);
}
#ifdef MODE_AFTER
last_mode = MODE_AFTER (e, last_mode, insn);
an extra check in make_preds_opaque. We also
need this to avoid confusing pre_edge_lcm when
antic is cleared but transp and comp are set. */
- RESET_BIT (transp[bb->index], j);
+ bitmap_clear_bit (transp[bb->index], j);
/* Insert a fake computing definition of MODE into entry
blocks which compute no mode. This represents the mode on
FOR_EACH_BB (bb)
{
if (info[bb->index].seginfo->mode == m)
- SET_BIT (antic[bb->index], j);
+ bitmap_set_bit (antic[bb->index], j);
if (info[bb->index].computing == m)
- SET_BIT (comp[bb->index], j);
+ bitmap_set_bit (comp[bb->index], j);
}
}
eg->aux = 0;
- if (! TEST_BIT (insert[e], j))
+ if (! bitmap_bit_p (insert[e], j))
continue;
eg->aux = (void *)1;
}
FOR_EACH_BB_REVERSE (bb)
- if (TEST_BIT (del[bb->index], j))
+ if (bitmap_bit_p (del[bb->index], j))
{
make_preds_opaque (bb, j);
/* Cancel the 'deleted' mode set. */
{
this_insn = ps_rtl_insn (ps, u);
this_latency = insn_latency (move->insn, this_insn);
- if (distance1_uses && !TEST_BIT (distance1_uses, u))
+ if (distance1_uses && !bitmap_bit_p (distance1_uses, u))
this_distance = -1;
else
this_distance = 0;
}
bitmap_clear (must_follow);
- SET_BIT (must_follow, move->def);
+ bitmap_set_bit (must_follow, move->def);
start = MAX (start, end - (ii - 1));
for (c = end; c >= start; c--)
ps_reg_move_info *move;
move = ps_reg_move (ps, first_move + dest_copy - 1);
- SET_BIT (move->uses, e->dest->cuid);
+ bitmap_set_bit (move->uses, e->dest->cuid);
if (e->distance == 1)
- SET_BIT (distance1_uses, e->dest->cuid);
+ bitmap_set_bit (distance1_uses, e->dest->cuid);
}
}
{
int v = e->src->cuid;
- if (TEST_BIT (sched_nodes, v))
+ if (bitmap_bit_p (sched_nodes, v))
{
int p_st = SCHED_TIME (v);
int earliest = p_st + e->latency - (e->distance * ii);
{
int v = e->dest->cuid;
- if (TEST_BIT (sched_nodes, v))
+ if (bitmap_bit_p (sched_nodes, v))
{
int s_st = SCHED_TIME (v);
int earliest = (e->data_type == MEM_DEP ? s_st - ii + 1 : INT_MIN);
and check only if
SCHED_TIME (e->src) - (e->distance * ii) == first_cycle_in_window */
for (e = u_node->in; e != 0; e = e->next_in)
- if (TEST_BIT (sched_nodes, e->src->cuid)
+ if (bitmap_bit_p (sched_nodes, e->src->cuid)
&& ((SCHED_TIME (e->src->cuid) - (e->distance * ii)) ==
first_cycle_in_window))
{
if (dump_file)
fprintf (dump_file, "%d ", e->src->cuid);
- SET_BIT (must_precede, e->src->cuid);
+ bitmap_set_bit (must_precede, e->src->cuid);
}
if (dump_file)
and check only if
SCHED_TIME (e->dest) + (e->distance * ii) == last_cycle_in_window */
for (e = u_node->out; e != 0; e = e->next_out)
- if (TEST_BIT (sched_nodes, e->dest->cuid)
+ if (bitmap_bit_p (sched_nodes, e->dest->cuid)
&& ((SCHED_TIME (e->dest->cuid) + (e->distance * ii)) ==
last_cycle_in_window))
{
if (dump_file)
fprintf (dump_file, "%d ", e->dest->cuid);
- SET_BIT (must_follow, e->dest->cuid);
+ bitmap_set_bit (must_follow, e->dest->cuid);
}
if (dump_file)
if (psi)
{
SCHED_TIME (u) = cycle;
- SET_BIT (sched_nodes, u);
+ bitmap_set_bit (sched_nodes, u);
success = 1;
*num_splits = 0;
if (dump_file)
if (!NONDEBUG_INSN_P (insn))
{
- RESET_BIT (tobe_scheduled, u);
+ bitmap_clear_bit (tobe_scheduled, u);
continue;
}
- if (TEST_BIT (sched_nodes, u))
+ if (bitmap_bit_p (sched_nodes, u))
continue;
/* Try to get non-empty scheduling window. */
{
int v = e->src->cuid;
- if (TEST_BIT (sched_nodes, v)
+ if (bitmap_bit_p (sched_nodes, v)
&& (low == SCHED_TIME (v) + e->latency - (e->distance * ii)))
if (SCHED_TIME (v) > lower)
{
{
int v = e->dest->cuid;
- if (TEST_BIT (sched_nodes, v)
+ if (bitmap_bit_p (sched_nodes, v)
&& (up == SCHED_TIME (v) - e->latency + (e->distance * ii)))
if (SCHED_TIME (v) < upper)
{
int u = crr_insn->id;
length++;
- gcc_assert (TEST_BIT (sched_nodes, u));
+ gcc_assert (bitmap_bit_p (sched_nodes, u));
/* ??? Test also that all nodes of sched_nodes are in ps, perhaps by
popcount (sched_nodes) == number of insns in ps. */
gcc_assert (SCHED_TIME (u) >= ps->min_cycle);
if (dump_file)
fprintf (dump_file, "%d ", u);
- gcc_assert (u < num_nodes && u >= 0 && !TEST_BIT (tmp, u));
+ gcc_assert (u < num_nodes && u >= 0 && !bitmap_bit_p (tmp, u));
- SET_BIT (tmp, u);
+ bitmap_set_bit (tmp, u);
}
if (dump_file)
bitmap_clear (workset);
if ((u = find_max_asap (g, scc)) >= 0)
- SET_BIT (workset, u);
+ bitmap_set_bit (workset, u);
dir = BOTTOMUP;
}
/* Don't consider the already ordered successors again. */
bitmap_and_compl (tmp, tmp, nodes_ordered);
bitmap_ior (workset, workset, tmp);
- RESET_BIT (workset, v);
- SET_BIT (nodes_ordered, v);
+ bitmap_clear_bit (workset, v);
+ bitmap_set_bit (nodes_ordered, v);
}
dir = BOTTOMUP;
bitmap_clear (predecessors);
/* Don't consider the already ordered predecessors again. */
bitmap_and_compl (tmp, tmp, nodes_ordered);
bitmap_ior (workset, workset, tmp);
- RESET_BIT (workset, v);
- SET_BIT (nodes_ordered, v);
+ bitmap_clear_bit (workset, v);
+ bitmap_set_bit (nodes_ordered, v);
}
dir = TOPDOWN;
bitmap_clear (successors);
next_ps_i = next_ps_i->next_in_row)
{
if (must_follow
- && TEST_BIT (must_follow, next_ps_i->id)
+ && bitmap_bit_p (must_follow, next_ps_i->id)
&& ! first_must_follow)
first_must_follow = next_ps_i;
- if (must_precede && TEST_BIT (must_precede, next_ps_i->id))
+ if (must_precede && bitmap_bit_p (must_precede, next_ps_i->id))
{
/* If we have already met a node that must follow, then
there is no possible column. */
}
/* The closing branch must be the last in the row. */
if (must_precede
- && TEST_BIT (must_precede, next_ps_i->id)
+ && bitmap_bit_p (must_precede, next_ps_i->id)
&& JUMP_P (ps_rtl_insn (ps, next_ps_i->id)))
return false;
/* Check if next_in_row is dependent on ps_i, both having same sched
times (typically ANTI_DEP). If so, ps_i cannot skip over it. */
- if (must_follow && TEST_BIT (must_follow, ps_i->next_in_row->id))
+ if (must_follow && bitmap_bit_p (must_follow, ps_i->next_in_row->id))
return false;
/* Advance PS_I over its next_in_row in the doubly linked list. */
{
if (split_insn (insn))
{
- SET_BIT (blocks, bb->index);
+ bitmap_set_bit (blocks, bb->index);
changed = true;
}
}
FOR_EACH_BB (bb)
{
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
/* If a block has a single predecessor, that we've already
processed, begin with the value data that was live at
the end of the predecessor block. */
/* ??? Ought to use more intelligent queuing of blocks. */
if (single_pred_p (bb)
- && TEST_BIT (visited, single_pred (bb)->index)
+ && bitmap_bit_p (visited, single_pred (bb)->index)
&& ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
{
all_vd[bb->index] = all_vd[single_pred (bb)->index];
if (MAY_HAVE_DEBUG_INSNS)
{
FOR_EACH_BB (bb)
- if (TEST_BIT (visited, bb->index)
+ if (bitmap_bit_p (visited, bb->index)
&& all_vd[bb->index].n_debug_insn_changes)
{
unsigned int regno;
fprintf (file, "n_bits = %d, set = {", bmap->n_bits);
for (pos = 30, i = 0; i < bmap->n_bits; i++)
- if (TEST_BIT (bmap, i))
+ if (bitmap_bit_p (bmap, i))
{
if (pos > 70)
{
The following operations can be performed in O(1) time:
* set_size : SBITMAP_SIZE
- * member_p : TEST_BIT
- * add_member : SET_BIT
- * remove_member : RESET_BIT
+ * member_p : bitmap_bit_p
+ * add_member : bitmap_set_bit
+ * remove_member : bitmap_clear_bit
Most other operations on this set representation are O(U) where U is
the size of the set universe:
/* Test if bit number bitno in the bitmap is set. */
static inline SBITMAP_ELT_TYPE
-TEST_BIT (const_sbitmap map, unsigned int bitno)
+bitmap_bit_p (const_sbitmap map, int bitno)
{
size_t i = bitno / SBITMAP_ELT_BITS;
unsigned int s = bitno % SBITMAP_ELT_BITS;
/* Set bit number BITNO in the sbitmap MAP. */
static inline void
-SET_BIT (sbitmap map, unsigned int bitno)
+bitmap_set_bit (sbitmap map, int bitno)
{
gcc_checking_assert (! map->popcount);
map->elms[bitno / SBITMAP_ELT_BITS]
|= (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
}
-/* Like SET_BIT, but updates population count. */
+/* Like bitmap_set_bit, but updates population count. */
static inline void
-SET_BIT_WITH_POPCOUNT (sbitmap map, unsigned int bitno)
+bitmap_set_bit_with_popcount (sbitmap map, int bitno)
{
bool oldbit;
gcc_checking_assert (map->popcount);
- oldbit = TEST_BIT (map, bitno);
+ oldbit = bitmap_bit_p (map, bitno);
if (!oldbit)
map->popcount[bitno / SBITMAP_ELT_BITS]++;
map->elms[bitno / SBITMAP_ELT_BITS]
/* Reset bit number BITNO in the sbitmap MAP. */
static inline void
-RESET_BIT (sbitmap map, unsigned int bitno)
+bitmap_clear_bit (sbitmap map, int bitno)
{
gcc_checking_assert (! map->popcount);
map->elms[bitno / SBITMAP_ELT_BITS]
&= ~((SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS);
}
-/* Like RESET_BIT, but updates population count. */
+/* Like bitmap_clear_bit, but updates population count. */
static inline void
-RESET_BIT_WITH_POPCOUNT (sbitmap map, unsigned int bitno)
+bitmap_clear_bit_with_popcount (sbitmap map, int bitno)
{
bool oldbit;
gcc_checking_assert (map->popcount);
- oldbit = TEST_BIT (map, bitno);
+ oldbit = bitmap_bit_p (map, bitno);
if (oldbit)
map->popcount[bitno / SBITMAP_ELT_BITS]--;
map->elms[bitno / SBITMAP_ELT_BITS]
/* Is bb_src dominated by bb_trg. */
#define IS_DOMINATED(bb_src, bb_trg) \
-( TEST_BIT (dom[bb_src], bb_trg) )
+( bitmap_bit_p (dom[bb_src], bb_trg) )
/* Probability: Prob[i] is an int in [0, REG_BR_PROB_BASE] which is
the probability of bb i relative to the region entry. */
if (max_hdr[blk] == -1) \
max_hdr[blk] = hdr; \
else if (dfs_nr[max_hdr[blk]] > dfs_nr[hdr]) \
- RESET_BIT (inner, hdr); \
+ bitmap_clear_bit (inner, hdr); \
else if (dfs_nr[max_hdr[blk]] < dfs_nr[hdr]) \
{ \
- RESET_BIT (inner,max_hdr[blk]); \
+ bitmap_clear_bit (inner,max_hdr[blk]); \
max_hdr[blk] = hdr; \
} \
}
gcc_assert (node != ENTRY_BLOCK);
child = ei_edge (current_edge)->dest->index;
gcc_assert (child != EXIT_BLOCK);
- RESET_BIT (in_stack, child);
- if (max_hdr[child] >= 0 && TEST_BIT (in_stack, max_hdr[child]))
+ bitmap_clear_bit (in_stack, child);
+ if (max_hdr[child] >= 0 && bitmap_bit_p (in_stack, max_hdr[child]))
UPDATE_LOOP_RELATIONS (node, max_hdr[child]);
ei_next (¤t_edge);
}
/* Process a node. */
node = ei_edge (current_edge)->src->index;
gcc_assert (node != ENTRY_BLOCK);
- SET_BIT (in_stack, node);
+ bitmap_set_bit (in_stack, node);
dfs_nr[node] = ++count;
/* We don't traverse to the exit block. */
/* If the successor is in the stack, then we've found a loop.
Mark the loop, if it is not a natural loop, then it will
be rejected during the second traversal. */
- if (TEST_BIT (in_stack, child))
+ if (bitmap_bit_p (in_stack, child))
{
no_loops = 0;
- SET_BIT (header, child);
+ bitmap_set_bit (header, child);
UPDATE_LOOP_RELATIONS (node, child);
SET_EDGE_PASSED (current_edge);
ei_next (¤t_edge);
with a new edge. */
if (dfs_nr[child])
{
- if (max_hdr[child] >= 0 && TEST_BIT (in_stack, max_hdr[child]))
+ if (max_hdr[child] >= 0 && bitmap_bit_p (in_stack, max_hdr[child]))
UPDATE_LOOP_RELATIONS (node, max_hdr[child]);
SET_EDGE_PASSED (current_edge);
ei_next (¤t_edge);
bool extend_regions_p;
if (no_loops)
- SET_BIT (header, 0);
+ bitmap_set_bit (header, 0);
/* Second traversal:find reducible inner loops and topologically sort
block of each region. */
loops to consider at this point. */
FOR_EACH_BB (bb)
{
- if (TEST_BIT (header, bb->index) && TEST_BIT (inner, bb->index))
+ if (bitmap_bit_p (header, bb->index) && bitmap_bit_p (inner, bb->index))
{
edge e;
edge_iterator ei;
&& single_succ (jbb) == EXIT_BLOCK_PTR)
{
queue[++tail] = jbb->index;
- SET_BIT (in_queue, jbb->index);
+ bitmap_set_bit (in_queue, jbb->index);
if (too_large (jbb->index, &num_bbs, &num_insns))
{
{
/* This is a loop latch. */
queue[++tail] = node;
- SET_BIT (in_queue, node);
+ bitmap_set_bit (in_queue, node);
if (too_large (node, &num_bbs, &num_insns))
{
tail = -1;
break;
}
- else if (!TEST_BIT (in_queue, node) && node != bb->index)
+ else if (!bitmap_bit_p (in_queue, node) && node != bb->index)
{
queue[++tail] = node;
- SET_BIT (in_queue, node);
+ bitmap_set_bit (in_queue, node);
if (too_large (node, &num_bbs, &num_insns))
{
of one too_large region. */
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR)
- SET_BIT (extended_rgn_header, e->dest->index);
+ bitmap_set_bit (extended_rgn_header, e->dest->index);
}
}
}
edge_iterator ei;
int bbn = order[i];
- if (max_hdr[bbn] != -1 && !TEST_BIT (header, bbn))
+ if (max_hdr[bbn] != -1 && !bitmap_bit_p (header, bbn))
{
int hdr = -1;
{
/* If BB start its own region,
update set of headers with BB. */
- SET_BIT (header, bbn);
+ bitmap_set_bit (header, bbn);
rescan = 1;
}
else
if (IS_RGN_ENTRY (bb))
{
- SET_BIT (dom[bb], 0);
+ bitmap_set_bit (dom[bb], 0);
prob[bb] = REG_BR_PROB_BASE;
return;
}
bitmap_ior (ancestor_edges[bb],
ancestor_edges[bb], ancestor_edges[pred_bb]);
- SET_BIT (ancestor_edges[bb], EDGE_TO_BIT (in_edge));
+ bitmap_set_bit (ancestor_edges[bb], EDGE_TO_BIT (in_edge));
bitmap_ior (pot_split[bb], pot_split[bb], pot_split[pred_bb]);
FOR_EACH_EDGE (out_edge, out_ei, in_edge->src->succs)
- SET_BIT (pot_split[bb], EDGE_TO_BIT (out_edge));
+ bitmap_set_bit (pot_split[bb], EDGE_TO_BIT (out_edge));
prob[bb] += ((prob[pred_bb] * in_edge->probability) / REG_BR_PROB_BASE);
}
- SET_BIT (dom[bb], bb);
+ bitmap_set_bit (dom[bb], bb);
bitmap_and_compl (pot_split[bb], pot_split[bb], ancestor_edges[bb]);
if (sched_verbose >= 2)
block = el.first_member[j]->src;
FOR_EACH_EDGE (e, ei, block->succs)
{
- if (!TEST_BIT (visited, e->dest->index))
+ if (!bitmap_bit_p (visited, e->dest->index))
{
for (k = 0; k < el.nr_members; k++)
if (e == el.first_member[k])
if (k >= el.nr_members)
{
bblst_table[bblst_last++] = e->dest;
- SET_BIT (visited, e->dest->index);
+ bitmap_set_bit (visited, e->dest->index);
update_idx++;
}
}
#define IS_REACHABLE(bb_from, bb_to) \
(bb_from == bb_to \
|| IS_RGN_ENTRY (bb_from) \
- || (TEST_BIT (ancestor_edges[bb_to], \
+ || (bitmap_bit_p (ancestor_edges[bb_to], \
EDGE_TO_BIT (single_pred_edge (BASIC_BLOCK (BB_TO_BLOCK (bb_from)))))))
/* Turns on the fed_by_spec_load flag for insns fed by load_insn. */
{
if (! sched_insns_conditions_mutex_p (last, insn))
add_dependence (last, insn, REG_DEP_ANTI);
- SET_BIT (insn_referenced, INSN_LUID (insn));
+ bitmap_set_bit (insn_referenced, INSN_LUID (insn));
}
CANT_MOVE (insn) = 1;
{
insn = prev_nonnote_insn (insn);
- if (TEST_BIT (insn_referenced, INSN_LUID (insn))
+ if (bitmap_bit_p (insn_referenced, INSN_LUID (insn))
|| DEBUG_INSN_P (insn))
continue;
new_rgn_number = sel_create_new_region ();
sel_add_block_to_region (preheader_block, &bb_ord_index, new_rgn_number);
- SET_BIT (bbs_in_loop_rgns, preheader_block->index);
+ bitmap_set_bit (bbs_in_loop_rgns, preheader_block->index);
for (i = 0; i < loop->num_nodes; i++)
{
gcc_assert (new_rgn_number >= 0);
- if (! TEST_BIT (bbs_in_loop_rgns, loop_blocks[i]->index))
+ if (! bitmap_bit_p (bbs_in_loop_rgns, loop_blocks[i]->index))
{
sel_add_block_to_region (loop_blocks[i], &bb_ord_index,
new_rgn_number);
- SET_BIT (bbs_in_loop_rgns, loop_blocks[i]->index);
+ bitmap_set_bit (bbs_in_loop_rgns, loop_blocks[i]->index);
}
}
/* Traverse all inner nodes of the loop. */
for (cur_loop = loop->inner; cur_loop; cur_loop = cur_loop->next)
- if (! TEST_BIT (bbs_in_loop_rgns, cur_loop->header->index))
+ if (! bitmap_bit_p (bbs_in_loop_rgns, cur_loop->header->index))
return false;
/* At this moment all regular inner loops should have been pipelined.
{
degree[bb->index] = 0;
- if (!TEST_BIT (bbs_in_loop_rgns, bb->index))
+ if (!bitmap_bit_p (bbs_in_loop_rgns, bb->index))
{
FOR_EACH_EDGE (e, ei, bb->preds)
- if (!TEST_BIT (bbs_in_loop_rgns, e->src->index))
+ if (!bitmap_bit_p (bbs_in_loop_rgns, e->src->index))
degree[bb->index]++;
}
else
insn_t succ_insn;
succ_iterator si;
- SET_BIT (visited_bbs, bbi);
+ bitmap_set_bit (visited_bbs, bbi);
if (blocks_to_reschedule)
bitmap_clear_bit (blocks_to_reschedule, bb->index);
gcc_assert (in_current_region_p (succ));
- if (!TEST_BIT (visited_bbs, succ_bbi))
+ if (!bitmap_bit_p (visited_bbs, succ_bbi))
{
gcc_assert (succ_bbi > bbi);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_reschedule, 0, bbi, bi)
{
gcc_assert (BLOCK_TO_BB (bbi) < current_nr_blocks);
- RESET_BIT (visited_bbs, BLOCK_TO_BB (bbi));
+ bitmap_clear_bit (visited_bbs, BLOCK_TO_BB (bbi));
}
}
else
int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
gcc_assert (index != EDGE_INDEX_NO_EDGE);
- if (! TEST_BIT (st_insert_map[index], expr->index))
+ if (! bitmap_bit_p (st_insert_map[index], expr->index))
break;
}
FOR_EACH_EDGE (tmp, ei, e->dest->preds)
{
int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
- RESET_BIT (st_insert_map[index], expr->index);
+ bitmap_clear_bit (st_insert_map[index], expr->index);
}
insert_insn_start_basic_block (insn, bb);
return 0;
bb = act->dest;
if (bb == EXIT_BLOCK_PTR
- || TEST_BIT (visited, bb->index))
+ || bitmap_bit_p (visited, bb->index))
{
if (!ei_end_p (ei))
ei_next (&ei);
act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
continue;
}
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
- if (TEST_BIT (st_antloc[bb->index], smexpr->index))
+ if (bitmap_bit_p (st_antloc[bb->index], smexpr->index))
{
for (last = smexpr->antic_stores;
BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
we can delete this one (It occurs earlier in the block). We'll
copy the SRC expression to an unused register in case there
are any side effects. */
- if (TEST_BIT (st_avloc[bb->index], ptr->index))
+ if (bitmap_bit_p (st_avloc[bb->index], ptr->index))
{
rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
if (dump_file)
replace_store_insn (r, XEXP (st, 0), bb, ptr);
continue;
}
- SET_BIT (st_avloc[bb->index], ptr->index);
+ bitmap_set_bit (st_avloc[bb->index], ptr->index);
}
for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
{
insn = XEXP (st, 0);
bb = BLOCK_FOR_INSN (insn);
- SET_BIT (st_antloc[bb->index], ptr->index);
+ bitmap_set_bit (st_antloc[bb->index], ptr->index);
}
}
{
/* It should not be necessary to consider the expression
killed if it is both anticipatable and available. */
- if (!TEST_BIT (st_antloc[bb->index], ptr->index)
- || !TEST_BIT (st_avloc[bb->index], ptr->index))
- SET_BIT (st_kill[bb->index], ptr->index);
+ if (!bitmap_bit_p (st_antloc[bb->index], ptr->index)
+ || !bitmap_bit_p (st_avloc[bb->index], ptr->index))
+ bitmap_set_bit (st_kill[bb->index], ptr->index);
}
else
- SET_BIT (st_transp[bb->index], ptr->index);
+ bitmap_set_bit (st_transp[bb->index], ptr->index);
}
}
/* If any of the edges we have above are abnormal, we can't move this
store. */
for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
- if (TEST_BIT (st_insert_map[x], ptr->index)
+ if (bitmap_bit_p (st_insert_map[x], ptr->index)
&& (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
break;
/* Now we want to insert the new stores which are going to be needed. */
FOR_EACH_BB (bb)
- if (TEST_BIT (st_delete_map[bb->index], ptr->index))
+ if (bitmap_bit_p (st_delete_map[bb->index], ptr->index))
{
delete_store (ptr, bb);
n_stores_deleted++;
}
for (x = 0; x < NUM_EDGES (edge_list); x++)
- if (TEST_BIT (st_insert_map[x], ptr->index))
+ if (bitmap_bit_p (st_insert_map[x], ptr->index))
{
did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
n_stores_created++;
if ((unsigned int)bb->index >= size)
bb_seen = sbitmap_resize (bb_seen, size * 2, 0);
- SET_BIT (bb_seen, bb->index);
+ bitmap_set_bit (bb_seen, bb->index);
}
static inline bool
bb_seen_p (basic_block bb)
{
- return TEST_BIT (bb_seen, bb->index);
+ return bitmap_bit_p (bb_seen, bb->index);
}
/* Return true if we should ignore the basic block for purposes of tracing. */
if (! blocks)
check_last_block = true;
else
- check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
+ check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
/* In the last basic block, before epilogue generation, there will be
a fallthru edge to EXIT. Special care is required if the last insn
if (!bb)
continue;
- if (blocks && !TEST_BIT (blocks, i))
+ if (blocks && !bitmap_bit_p (blocks, i))
continue;
gsi = gsi_last_nondebug_bb (bb);
/* Negative LP numbers are MUST_NOT_THROW regions which
are not considered BB enders. */
if (lp_nr < 0)
- SET_BIT (r_reachable, -lp_nr);
+ bitmap_set_bit (r_reachable, -lp_nr);
 /* Positive LP numbers are real landing pads, and are BB enders. */
else if (lp_nr > 0)
{
gcc_assert (gsi_one_before_end_p (gsi));
region = get_eh_region_from_lp_number (lp_nr);
- SET_BIT (r_reachable, region->index);
- SET_BIT (lp_reachable, lp_nr);
+ bitmap_set_bit (r_reachable, region->index);
+ bitmap_set_bit (lp_reachable, lp_nr);
}
/* Avoid removing regions referenced from RESX/EH_DISPATCH. */
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
- SET_BIT (r_reachable, gimple_resx_region (stmt));
+ bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
break;
case GIMPLE_EH_DISPATCH:
- SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
+ bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
break;
default:
break;
for (r_nr = 1;
VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
- if (region && !TEST_BIT (r_reachable, r_nr))
+ if (region && !bitmap_bit_p (r_reachable, r_nr))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
for (lp_nr = 1;
VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
- if (lp && !TEST_BIT (lp_reachable, lp_nr))
+ if (lp && !bitmap_bit_p (lp_reachable, lp_nr))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
- SET_BIT (r_reachable, gimple_resx_region (stmt));
+ bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
break;
case GIMPLE_EH_DISPATCH:
- SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
+ bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
break;
default:
break;
for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
- && !TEST_BIT (r_reachable, i))
+ && !bitmap_bit_p (r_reachable, i))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable region %d\n", i);
if (!new_ssa_names)
return false;
return (ver < SBITMAP_SIZE (new_ssa_names)
- && TEST_BIT (old_ssa_names, ver));
+ && bitmap_bit_p (old_ssa_names, ver));
}
if (!new_ssa_names)
return false;
return (ver < SBITMAP_SIZE (new_ssa_names)
- && TEST_BIT (new_ssa_names, ver));
+ && bitmap_bit_p (new_ssa_names, ver));
}
/* Register NEW_TREE and OLD in NEW_SSA_NAMES and OLD_SSA_NAMES,
respectively. */
- SET_BIT (new_ssa_names, SSA_NAME_VERSION (new_tree));
- SET_BIT (old_ssa_names, SSA_NAME_VERSION (old));
+ bitmap_set_bit (new_ssa_names, SSA_NAME_VERSION (new_tree));
+ bitmap_set_bit (old_ssa_names, SSA_NAME_VERSION (old));
}
set_rewrite_uses (stmt, true);
}
if (rewrite_uses_p (stmt))
- SET_BIT (interesting_blocks, bb->index);
+ bitmap_set_bit (interesting_blocks, bb->index);
return;
}
/* If we found the statement interesting then also mark the block BB
as interesting. */
if (rewrite_uses_p (stmt) || register_defs_p (stmt))
- SET_BIT (interesting_blocks, bb->index);
+ bitmap_set_bit (interesting_blocks, bb->index);
}
/* Structure used by prune_unused_phi_nodes to record bounds of the intervals
/* Step 2. Rewrite every variable used in each statement in the block
with its immediate reaching definitions. Update the current definition
of a variable when a new real or virtual definition is found. */
- if (TEST_BIT (interesting_blocks, bb->index))
+ if (bitmap_bit_p (interesting_blocks, bb->index))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
rewrite_stmt (&gsi);
}
/* Step 2. Rewrite every variable used in each statement in the block. */
- if (TEST_BIT (interesting_blocks, bb->index))
+ if (bitmap_bit_p (interesting_blocks, bb->index))
{
gcc_checking_assert (bitmap_bit_p (blocks_to_update, bb->index));
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
want to replace existing instances. */
if (names_to_release)
EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
- RESET_BIT (new_ssa_names, i);
+ bitmap_clear_bit (new_ssa_names, i);
/* First process names in NEW_SSA_NAMES. Otherwise, uses of old
names may be considered to be live-in on blocks that contain
interesting_blocks = sbitmap_alloc (last_basic_block);
bitmap_clear (interesting_blocks);
EXECUTE_IF_SET_IN_BITMAP (blocks_to_update, 0, i, bi)
- SET_BIT (interesting_blocks, i);
+ bitmap_set_bit (interesting_blocks, i);
rewrite_blocks (start_bb, REWRITE_UPDATE);
int S;
source_location locus;
- SET_BIT (g->visited, T);
+ bitmap_set_bit (g->visited, T);
FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
{
- if (!TEST_BIT (g->visited, S))
+ if (!bitmap_bit_p (g->visited, S))
elim_forward (g, S);
});
VEC_safe_push (int, heap, g->stack, T);
FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
{
- if (!TEST_BIT (g->visited, P))
+ if (!bitmap_bit_p (g->visited, P))
return 1;
});
return 0;
int P;
source_location locus;
- SET_BIT (g->visited, T);
+ bitmap_set_bit (g->visited, T);
FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
{
- if (!TEST_BIT (g->visited, P))
+ if (!bitmap_bit_p (g->visited, P))
{
elim_backward (g, P);
insert_partition_copy_on_edge (g->e, P, T, locus);
insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
{
- if (!TEST_BIT (g->visited, P))
+ if (!bitmap_bit_p (g->visited, P))
{
elim_backward (g, P);
insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
S = elim_graph_remove_succ_edge (g, T, &locus);
if (S != -1)
{
- SET_BIT (g->visited, T);
+ bitmap_set_bit (g->visited, T);
insert_partition_copy_on_edge (g->e, T, S, locus);
}
}
FOR_EACH_VEC_ELT (int, g->nodes, x, part)
{
- if (!TEST_BIT (g->visited, part))
+ if (!bitmap_bit_p (g->visited, part))
elim_forward (g, part);
}
while (VEC_length (int, g->stack) > 0)
{
x = VEC_pop (int, g->stack);
- if (!TEST_BIT (g->visited, x))
+ if (!bitmap_bit_p (g->visited, x))
elim_create (g, x);
}
}
if (add_to_worklist)
VEC_safe_push (gimple, heap, worklist, stmt);
if (bb_contains_live_stmts && !is_gimple_debug (stmt))
- SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
+ bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
gcc_assert (op);
ver = SSA_NAME_VERSION (op);
- if (TEST_BIT (processed, ver))
+ if (bitmap_bit_p (processed, ver))
{
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (gimple_nop_p (stmt)
|| gimple_plf (stmt, STMT_NECESSARY));
return;
}
- SET_BIT (processed, ver);
+ bitmap_set_bit (processed, ver);
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (stmt);
gimple_set_plf (stmt, STMT_NECESSARY, true);
if (bb_contains_live_stmts)
- SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
+ bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
VEC_safe_push (gimple, heap, worklist, stmt);
}
{
gimple stmt = last_stmt (bb);
- SET_BIT (last_stmt_necessary, bb->index);
- SET_BIT (bb_contains_live_stmts, bb->index);
+ bitmap_set_bit (last_stmt_necessary, bb->index);
+ bitmap_set_bit (bb_contains_live_stmts, bb->index);
/* We actually mark the statement only if it is a control statement. */
if (stmt && is_ctrl_stmt (stmt))
continue;
}
- if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
+ if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
mark_last_stmt_necessary (cd_bb);
}
if (!skipped)
- SET_BIT (visited_control_parents, bb->index);
+ bitmap_set_bit (visited_control_parents, bb->index);
}
/* We have to skip already visited (and thus necessary) statements
to make the chaining work after we dropped back to simple mode. */
if (chain_ovfl
- && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
+ && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
{
gcc_assert (gimple_nop_p (def_stmt)
|| gimple_plf (def_stmt, STMT_NECESSARY));
already done so. */
basic_block bb = gimple_bb (stmt);
if (bb != ENTRY_BLOCK_PTR
- && !TEST_BIT (visited_control_parents, bb->index))
+ && !bitmap_bit_p (visited_control_parents, bb->index))
mark_control_dependent_edges_necessary (bb, el, false);
}
if (gimple_bb (stmt)
!= get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
{
- if (!TEST_BIT (last_stmt_necessary, arg_bb->index))
+ if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
mark_last_stmt_necessary (arg_bb);
}
else if (arg_bb != ENTRY_BLOCK_PTR
- && !TEST_BIT (visited_control_parents,
+ && !bitmap_bit_p (visited_control_parents,
arg_bb->index))
mark_control_dependent_edges_necessary (arg_bb, el, true);
}
call (); saving one operand. */
if (name
&& TREE_CODE (name) == SSA_NAME
- && !TEST_BIT (processed, SSA_NAME_VERSION (name))
+ && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
/* Avoid doing so for allocation calls which we
did not mark as necessary, it will confuse the
special logic we apply to malloc/free pair removal. */
{
prev_bb = bb->prev_bb;
- if (!TEST_BIT (bb_contains_live_stmts, bb->index)
+ if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
|| !(bb->flags & BB_REACHABLE))
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
edge_iterator ei;
basic_block pred_bb;
bitmap loe;
- gcc_assert (!TEST_BIT (visited, bb->index));
+ gcc_assert (!bitmap_bit_p (visited, bb->index));
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
loe = live_on_entry (live, bb);
FOR_EACH_EDGE (e, ei, bb->preds)
changes, and pred_bb has been visited already, add it to the
revisit stack. */
change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
- if (TEST_BIT (visited, pred_bb->index) && change)
+ if (bitmap_bit_p (visited, pred_bb->index) && change)
{
- RESET_BIT (visited, pred_bb->index);
+ bitmap_clear_bit (visited, pred_bb->index);
*(live->stack_top)++ = pred_bb->index;
}
}
if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
last = bb;
- if (TEST_BIT (contains_call, bb->index))
+ if (bitmap_bit_p (contains_call, bb->index))
break;
FOR_EACH_EDGE (e, ei, bb->succs)
}
if (!gsi_end_p (bsi))
- SET_BIT (contains_call, bb->index);
+ bitmap_set_bit (contains_call, bb->index);
}
for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
initialize_original_copy_tables ();
wont_exit = sbitmap_alloc (n_unroll + 1);
bitmap_ones (wont_exit);
- RESET_BIT (wont_exit, 0);
+ bitmap_clear_bit (wont_exit, 0);
if (!gimple_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
n_unroll, wont_exit,
{
XEXP (addr, 1) = gen_int_mode (i, address_mode);
if (memory_address_addr_space_p (mode, addr, as))
- SET_BIT (valid_mult, i + MAX_RATIO);
+ bitmap_set_bit (valid_mult, i + MAX_RATIO);
}
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " allowed multipliers:");
for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
- if (TEST_BIT (valid_mult, i + MAX_RATIO))
+ if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
fprintf (dump_file, " %d", (int) i);
fprintf (dump_file, "\n");
fprintf (dump_file, "\n");
if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
return false;
- return TEST_BIT (valid_mult, ratio + MAX_RATIO);
+ return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
/* Returns cost of address in shape symbol + var + OFFSET + RATIO * index.
last one. */
wont_exit = sbitmap_alloc (factor);
bitmap_ones (wont_exit);
- RESET_BIT (wont_exit, factor - 1);
+ bitmap_clear_bit (wont_exit, factor - 1);
ok = gimple_duplicate_loop_to_header_edge
(loop, loop_latch_edge (loop), factor - 1,
unsigned np, i;
sbitmap visited = sbitmap_alloc (last_basic_block);
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
+#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
+#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
bitmap_clear (visited);
{
if (!BB_VISITED (phiblock))
{
- SET_BIT (changed_blocks, block->index);
+ bitmap_set_bit (changed_blocks, block->index);
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 1;
return false;
/* Of multiple successors we have to have visited one already. */
if (!first)
{
- SET_BIT (changed_blocks, block->index);
+ bitmap_set_bit (changed_blocks, block->index);
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 1;
changed = true;
if (!bitmap_set_equal (old, ANTIC_IN (block)))
{
changed = true;
- SET_BIT (changed_blocks, block->index);
+ bitmap_set_bit (changed_blocks, block->index);
FOR_EACH_EDGE (e, ei, block->preds)
- SET_BIT (changed_blocks, e->src->index);
+ bitmap_set_bit (changed_blocks, e->src->index);
}
else
- RESET_BIT (changed_blocks, block->index);
+ bitmap_clear_bit (changed_blocks, block->index);
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
{
changed = true;
- SET_BIT (changed_blocks, block->index);
+ bitmap_set_bit (changed_blocks, block->index);
FOR_EACH_EDGE (e, ei, block->preds)
- SET_BIT (changed_blocks, e->src->index);
+ bitmap_set_bit (changed_blocks, e->src->index);
}
else
- RESET_BIT (changed_blocks, block->index);
+ bitmap_clear_bit (changed_blocks, block->index);
maybe_dump_sets:
if (dump_file && (dump_flags & TDF_DETAILS))
e->flags &= ~EDGE_DFS_BACK;
if (e->flags & EDGE_ABNORMAL)
{
- SET_BIT (has_abnormal_preds, block->index);
+ bitmap_set_bit (has_abnormal_preds, block->index);
break;
}
}
changed = false;
for (i = postorder_num - 1; i >= 0; i--)
{
- if (TEST_BIT (changed_blocks, postorder[i]))
+ if (bitmap_bit_p (changed_blocks, postorder[i]))
{
basic_block block = BASIC_BLOCK (postorder[i]);
changed |= compute_antic_aux (block,
- TEST_BIT (has_abnormal_preds,
+ bitmap_bit_p (has_abnormal_preds,
block->index));
}
}
changed = false;
for (i = postorder_num - 1 ; i >= 0; i--)
{
- if (TEST_BIT (changed_blocks, postorder[i]))
+ if (bitmap_bit_p (changed_blocks, postorder[i]))
{
basic_block block = BASIC_BLOCK (postorder[i]);
changed
|= compute_partial_antic_aux (block,
- TEST_BIT (has_abnormal_preds,
+ bitmap_bit_p (has_abnormal_preds,
block->index));
}
}
bool head = false;
gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
- gcc_assert (!TEST_BIT (bb_in_list, bb->index));
+ gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));
if (cfg_blocks_empty_p ())
{
VEC_replace (basic_block, cfg_blocks,
head ? cfg_blocks_head : cfg_blocks_tail,
bb);
- SET_BIT (bb_in_list, bb->index);
+ bitmap_set_bit (bb_in_list, bb->index);
}
cfg_blocks_head = ((cfg_blocks_head + 1)
% VEC_length (basic_block, cfg_blocks));
--cfg_blocks_num;
- RESET_BIT (bb_in_list, bb->index);
+ bitmap_clear_bit (bb_in_list, bb->index);
return bb;
}
e->flags |= EDGE_EXECUTABLE;
/* If the block is already in the list, we're done. */
- if (TEST_BIT (bb_in_list, bb->index))
+ if (bitmap_bit_p (bb_in_list, bb->index))
return;
cfg_blocks_add (bb);
the destination block is executable. Otherwise, visit the
statement only if its block is marked executable. */
if (gimple_code (stmt) == GIMPLE_PHI
- || TEST_BIT (executable_blocks, bb->index))
+ || bitmap_bit_p (executable_blocks, bb->index))
simulate_stmt (stmt);
}
}
/* If this is the first time we've simulated this block, then we
must simulate each of its statements. */
- if (!TEST_BIT (executable_blocks, block->index))
+ if (!bitmap_bit_p (executable_blocks, block->index))
{
gimple_stmt_iterator j;
unsigned int normal_edge_count;
edge_iterator ei;
/* Note that we have simulated this block. */
- SET_BIT (executable_blocks, block->index);
+ bitmap_set_bit (executable_blocks, block->index);
for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
{
|| !is_reassociable_op (oe1def, dcode, loop))
continue;
- SET_BIT (candidates, i);
+ bitmap_set_bit (candidates, i);
nr_candidates++;
}
{
if (oe1->op == c->op)
{
- SET_BIT (candidates2, i);
+ bitmap_set_bit (candidates2, i);
++nr_candidates2;
break;
}
for (j = 0; j < FIRST_REF_NODE; j++)
{
if (!get_varinfo (j)->is_special_var)
- SET_BIT (graph->direct_nodes, j);
+ bitmap_set_bit (graph->direct_nodes, j);
}
for (j = 0; j < graph->size; j++)
if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
else
- RESET_BIT (graph->direct_nodes, lhsvar);
+ bitmap_clear_bit (graph->direct_nodes, lhsvar);
}
else if (rhs.type == ADDRESSOF)
{
add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
/* All related variables are no longer direct nodes. */
- RESET_BIT (graph->direct_nodes, rhsvar);
+ bitmap_clear_bit (graph->direct_nodes, rhsvar);
v = get_varinfo (rhsvar);
if (!v->is_full_var)
{
v = lookup_vi_for_tree (v->decl);
do
{
- RESET_BIT (graph->direct_nodes, v->id);
+ bitmap_clear_bit (graph->direct_nodes, v->id);
v = v->next;
}
while (v != NULL);
else if (lhs.offset != 0 || rhs.offset != 0)
{
if (rhs.offset != 0)
- RESET_BIT (graph->direct_nodes, lhs.var);
+ bitmap_clear_bit (graph->direct_nodes, lhs.var);
else if (lhs.offset != 0)
- RESET_BIT (graph->direct_nodes, rhs.var);
+ bitmap_clear_bit (graph->direct_nodes, rhs.var);
}
}
}
t = find (storedanything_id);
for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
{
- if (!TEST_BIT (graph->direct_nodes, i)
+ if (!bitmap_bit_p (graph->direct_nodes, i)
&& get_varinfo (i)->may_have_pointers)
add_graph_edge (graph, find (i), t);
}
bitmap_iterator bi;
unsigned int my_dfs;
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
break;
w = find (i);
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
scc_visit (graph, si, w);
{
unsigned int t = find (w);
}
}
}
- SET_BIT (si->deleted, n);
+ bitmap_set_bit (si->deleted, n);
}
else
VEC_safe_push (unsigned, heap, si->scc_stack, n);
bitmap_iterator bi;
unsigned int j;
- SET_BIT (ti->visited, n);
+ bitmap_set_bit (ti->visited, n);
if (graph->succs[n])
EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
{
- if (!TEST_BIT (ti->visited, j))
+ if (!bitmap_bit_p (ti->visited, j))
topo_visit (graph, ti, j);
}
struct scc_info *si = init_scc_info (size);
for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
- if (!TEST_BIT (si->visited, i) && find (i) == i)
+ if (!bitmap_bit_p (si->visited, i) && find (i) == i)
scc_visit (graph, si, i);
free_scc_info (si);
unsigned int size = graph->size;
for (i = 0; i != size; ++i)
- if (!TEST_BIT (ti->visited, i) && find (i) == i)
+ if (!bitmap_bit_p (ti->visited, i) && find (i) == i)
topo_visit (graph, ti, i);
}
unsigned int my_dfs;
gcc_assert (si->node_mapping[n] == n);
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
si->dfs[n] = si->current_index ++;
my_dfs = si->dfs[n];
{
unsigned int w = si->node_mapping[i];
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
{
unsigned int w = si->node_mapping[i];
- if (TEST_BIT (si->deleted, w))
+ if (bitmap_bit_p (si->deleted, w))
continue;
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
condense_visit (graph, si, w);
{
unsigned int t = si->node_mapping[w];
unsigned int w = VEC_pop (unsigned, si->scc_stack);
si->node_mapping[w] = n;
- if (!TEST_BIT (graph->direct_nodes, w))
- RESET_BIT (graph->direct_nodes, n);
+ if (!bitmap_bit_p (graph->direct_nodes, w))
+ bitmap_clear_bit (graph->direct_nodes, n);
/* Unify our nodes. */
if (graph->preds[w])
graph->points_to[w]);
}
}
- SET_BIT (si->deleted, n);
+ bitmap_set_bit (si->deleted, n);
}
else
VEC_safe_push (unsigned, heap, si->scc_stack, n);
{
unsigned int i;
bitmap_iterator bi;
- SET_BIT (si->visited, n);
+ bitmap_set_bit (si->visited, n);
if (!graph->points_to[n])
graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
{
unsigned int w = si->node_mapping[i];
- if (!TEST_BIT (si->visited, w))
+ if (!bitmap_bit_p (si->visited, w))
label_visit (graph, si, w);
/* Skip unused edges */
bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
}
/* Indirect nodes get fresh variables. */
- if (!TEST_BIT (graph->direct_nodes, n))
+ if (!bitmap_bit_p (graph->direct_nodes, n))
bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
if (!bitmap_empty_p (graph->points_to[n]))
/* Condense the nodes, which means to find SCC's, count incoming
predecessors, and unite nodes in SCC's. */
for (i = 0; i < FIRST_REF_NODE; i++)
- if (!TEST_BIT (si->visited, si->node_mapping[i]))
+ if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
condense_visit (graph, si, si->node_mapping[i]);
bitmap_clear (si->visited);
/* Actually the label the nodes for pointer equivalences */
for (i = 0; i < FIRST_REF_NODE; i++)
- if (!TEST_BIT (si->visited, si->node_mapping[i]))
+ if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
label_visit (graph, si, si->node_mapping[i]);
/* Calculate location equivalence labels. */
if (dump_file && (dump_flags & TDF_DETAILS))
for (i = 0; i < FIRST_REF_NODE; i++)
{
- bool direct_node = TEST_BIT (graph->direct_nodes, i);
+ bool direct_node = bitmap_bit_p (graph->direct_nodes, i);
fprintf (dump_file,
"Equivalence classes for %s node id %d:%s are pointer: %d"
", location:%d\n",
gcc_assert (src != ENTRY_BLOCK_PTR);
- if (! TEST_BIT (visited, src->index))
+ if (! bitmap_bit_p (visited, src->index))
{
- SET_BIT (visited, src->index);
+ bitmap_set_bit (visited, src->index);
FOR_EACH_EDGE (e, ei, src->preds)
VEC_safe_push (edge, heap, stack, e);
}
for (k = 0; k < group_size; k++)
{
first_group_load_index = VEC_index (int, load_permutation, k);
- if (TEST_BIT (load_index, first_group_load_index))
+ if (bitmap_bit_p (load_index, first_group_load_index))
{
bad_permutation = true;
break;
}
- SET_BIT (load_index, first_group_load_index);
+ bitmap_set_bit (load_index, first_group_load_index);
}
if (!bad_permutation)
for (k = 0; k < group_size; k++)
- if (!TEST_BIT (load_index, k))
+ if (!bitmap_bit_p (load_index, k))
{
bad_permutation = true;
break;
prev = next;
}
- if (TEST_BIT (load_index, prev))
+ if (bitmap_bit_p (load_index, prev))
{
supported = false;
break;
}
- SET_BIT (load_index, prev);
+ bitmap_set_bit (load_index, prev);
}
for (j = 0; j < group_size; j++)
- if (!TEST_BIT (load_index, j))
+ if (!bitmap_bit_p (load_index, j))
return false;
sbitmap_free (load_index);
live_on_edge (edge e, tree name)
{
return (live[e->dest->index]
- && TEST_BIT (live[e->dest->index], SSA_NAME_VERSION (name)));
+ && bitmap_bit_p (live[e->dest->index], SSA_NAME_VERSION (name)));
}
/* Local functions. */
/* If op is not live beyond this stmt, do not bother to insert
asserts for it. */
- if (!TEST_BIT (live, SSA_NAME_VERSION (op)))
+ if (!bitmap_bit_p (live, SSA_NAME_VERSION (op)))
continue;
/* If OP is used in such a way that we can infer a value
/* Update live. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
- SET_BIT (live, SSA_NAME_VERSION (op));
+ bitmap_set_bit (live, SSA_NAME_VERSION (op));
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_DEF)
- RESET_BIT (live, SSA_NAME_VERSION (op));
+ bitmap_clear_bit (live, SSA_NAME_VERSION (op));
}
/* Traverse all PHI nodes in BB, updating live. */
{
tree arg = USE_FROM_PTR (arg_p);
if (TREE_CODE (arg) == SSA_NAME)
- SET_BIT (live, SSA_NAME_VERSION (arg));
+ bitmap_set_bit (live, SSA_NAME_VERSION (arg));
}
- RESET_BIT (live, SSA_NAME_VERSION (res));
+ bitmap_clear_bit (live, SSA_NAME_VERSION (res));
}
return need_assert;
while (!fibheap_empty (worklist))
{
bb = (basic_block) fibheap_extract_min (worklist);
- RESET_BIT (in_worklist, bb->index);
- gcc_assert (!TEST_BIT (visited, bb->index));
- if (!TEST_BIT (visited, bb->index))
+ bitmap_clear_bit (in_worklist, bb->index);
+ gcc_assert (!bitmap_bit_p (visited, bb->index));
+ if (!bitmap_bit_p (visited, bb->index))
{
bool changed;
edge_iterator ei;
int oldinsz, oldoutsz;
- SET_BIT (visited, bb->index);
+ bitmap_set_bit (visited, bb->index);
if (VTI (bb)->in.vars)
{
if (e->dest == EXIT_BLOCK_PTR)
continue;
- if (TEST_BIT (visited, e->dest->index))
+ if (bitmap_bit_p (visited, e->dest->index))
{
- if (!TEST_BIT (in_pending, e->dest->index))
+ if (!bitmap_bit_p (in_pending, e->dest->index))
{
/* Send E->DEST to next round. */
- SET_BIT (in_pending, e->dest->index);
+ bitmap_set_bit (in_pending, e->dest->index);
fibheap_insert (pending,
bb_order[e->dest->index],
e->dest);
}
}
- else if (!TEST_BIT (in_worklist, e->dest->index))
+ else if (!bitmap_bit_p (in_worklist, e->dest->index))
{
/* Add E->DEST to current round. */
- SET_BIT (in_worklist, e->dest->index);
+ bitmap_set_bit (in_worklist, e->dest->index);
fibheap_insert (worklist, bb_order[e->dest->index],
e->dest);
}