/* Basic block reordering routines for the GNU compiler.
- Copyright (C) 2000, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2000, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
This file is part of GCC.
/* Which trace is the bb in? */
int in_trace;
+ /* Which trace was this bb visited in? */
+ int visited;
+
/* Which heap is BB in (if any)? */
fibheap_t heap;
static bool better_edge_p (const_basic_block, const_edge, int, int, int, int, const_edge);
static void connect_traces (int, struct trace *);
static bool copy_bb_p (const_basic_block, int);
-static int get_uncond_jump_length (void);
static bool push_to_next_round_p (const_basic_block, int, int, int, gcov_type);
\f
+/* Return the trace number in which BB was visited; 0 if not yet visited. */
+
+static int
+bb_visited_trace (const_basic_block bb)
+{
+ gcc_assert (bb->index < array_size);
+ return bbd[bb->index].visited;
+}
+
+/* Mark BB as visited in trace number TRACE; if BB is queued in a heap, remove it. */
+
+static void
+mark_bb_visited (basic_block bb, int trace)
+{
+ bbd[bb->index].visited = trace;
+ if (bbd[bb->index].heap)
+ {
+ fibheap_delete_node (bbd[bb->index].heap, bbd[bb->index].node);
+ bbd[bb->index].heap = NULL;
+ bbd[bb->index].node = NULL;
+ }
+}
+
/* Check to see if bb should be pushed into the next round of trace
collections or not. Reasons for pushing the block forward are 1).
If the block is cold, we are doing partitioning, and there will be
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR
- && e->dest->il.rtl->visited != trace_n
+ && bb_visited_trace (e->dest) != trace_n
&& (e->flags & EDGE_CAN_FALLTHRU)
&& !(e->flags & EDGE_COMPLEX))
{
if (is_preferred)
{
/* The best edge is preferred. */
- if (!e->dest->il.rtl->visited
+ if (!bb_visited_trace (e->dest)
|| bbd[e->dest->index].start_of_trace >= 0)
{
/* The current edge E is also preferred. */
}
else
{
- if (!e->dest->il.rtl->visited
+ if (!bb_visited_trace (e->dest)
|| bbd[e->dest->index].start_of_trace >= 0)
{
/* The current edge E is preferred. */
return best_bb;
}
-/* This function marks BB that it was visited in trace number TRACE. */
-
-static void
-mark_bb_visited (basic_block bb, int trace)
-{
- bb->il.rtl->visited = trace;
- if (bbd[bb->index].heap)
- {
- fibheap_delete_node (bbd[bb->index].heap, bbd[bb->index].node);
- bbd[bb->index].heap = NULL;
- bbd[bb->index].node = NULL;
- }
-}
-
/* One round of finding traces. Find traces for BRANCH_TH and EXEC_TH i.e. do
not include basic blocks their probability is lower than BRANCH_TH or their
frequency is lower than EXEC_TH into traces (or count is lower than
if (e->dest == EXIT_BLOCK_PTR)
continue;
- if (e->dest->il.rtl->visited
- && e->dest->il.rtl->visited != *n_traces)
+ if (bb_visited_trace (e->dest)
+ && bb_visited_trace (e->dest) != *n_traces)
continue;
if (BB_PARTITION (e->dest) != BB_PARTITION (bb))
{
if (e == best_edge
|| e->dest == EXIT_BLOCK_PTR
- || e->dest->il.rtl->visited)
+ || bb_visited_trace (e->dest))
continue;
key = bb_to_key (e->dest);
if (best_edge) /* Suitable successor was found. */
{
- if (best_edge->dest->il.rtl->visited == *n_traces)
+ if (bb_visited_trace (best_edge->dest) == *n_traces)
{
/* We do nothing with one basic block loops. */
if (best_edge->dest != bb)
if (e != best_edge
&& (e->flags & EDGE_CAN_FALLTHRU)
&& !(e->flags & EDGE_COMPLEX)
- && !e->dest->il.rtl->visited
+ && !bb_visited_trace (e->dest)
&& single_pred_p (e->dest)
&& !(e->flags & EDGE_CROSSING)
&& single_succ_p (e->dest)
FOR_EACH_EDGE (e, ei, bb->succs)
{
if (e->dest == EXIT_BLOCK_PTR
- || e->dest->il.rtl->visited)
+ || bb_visited_trace (e->dest))
continue;
if (bbd[e->dest->index].heap)
BB_COPY_PARTITION (new_bb, old_bb);
gcc_assert (e->dest == new_bb);
- gcc_assert (!e->dest->il.rtl->visited);
if (dump_file)
fprintf (dump_file,
"Duplicated bb %d (created bb %d)\n",
old_bb->index, new_bb->index);
- new_bb->il.rtl->visited = trace;
- new_bb->aux = bb->aux;
- bb->aux = new_bb;
if (new_bb->index >= array_size || last_basic_block > array_size)
{
for (i = array_size; i < new_size; i++)
{
bbd[i].start_of_trace = -1;
- bbd[i].in_trace = -1;
bbd[i].end_of_trace = -1;
+ bbd[i].in_trace = -1;
+ bbd[i].visited = 0;
bbd[i].heap = NULL;
bbd[i].node = NULL;
}
}
}
+ gcc_assert (!bb_visited_trace (e->dest));
+ mark_bb_visited (new_bb, trace);
+ new_bb->aux = bb->aux;
+ bb->aux = new_bb;
+
bbd[new_bb->index].in_trace = trace;
return new_bb;
/* Return the length of unconditional jump instruction. */
-static int
+int
get_uncond_jump_length (void)
{
rtx label, jump;
emit_barrier_after_bb (basic_block bb)
{
rtx barrier = emit_barrier_after (BB_END (bb));
- bb->il.rtl->footer = unlink_insn_chain (barrier, barrier);
+ BB_FOOTER (bb) = unlink_insn_chain (barrier, barrier);
}
/* The landing pad OLD_LP, in block OLD_BB, has edges from both partitions.
/* We know the fall-thru edge crosses; if the cond
jump edge does NOT cross, and its destination is the
next block in the bb order, invert the jump
- (i.e. fix it so the fall thru does not cross and
+ (i.e. fix it so the fall through does not cross and
the cond jump does). */
if (!cond_jump_crosses
/* Reorder basic blocks. The main entry point to this file. FLAGS is
the set of flags to pass to cfg_layout_initialize(). */
-void
+static void
reorder_basic_blocks (void)
{
int n_traces;
for (i = 0; i < array_size; i++)
{
bbd[i].start_of_trace = -1;
- bbd[i].in_trace = -1;
bbd[i].end_of_trace = -1;
+ bbd[i].in_trace = -1;
+ bbd[i].visited = 0;
bbd[i].heap = NULL;
bbd[i].node = NULL;
}
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
return 0;
+ clear_bb_flags ();
cfg_layout_initialize (0);
/* We are estimating the length of uncond jump insn only once
/* Duplicate computed gotos. */
FOR_EACH_BB (bb)
{
- if (bb->il.rtl->visited)
+ if (bb->flags & BB_VISITED)
continue;
- bb->il.rtl->visited = 1;
+ bb->flags |= BB_VISITED;
/* BB must have one outgoing edge. That edge must not lead to
the exit block or the next block.
new_bb = duplicate_block (single_succ (bb), single_succ_edge (bb), bb);
new_bb->aux = bb->aux;
bb->aux = new_bb;
- new_bb->il.rtl->visited = 1;
+ new_bb->flags |= BB_VISITED;
}
done:
/* Convert all crossing fall_thru edges to non-crossing fall
thrus to unconditional jumps (that jump to the original fall
- thru dest). */
+ through dest). */
fix_up_fall_thru_edges ();
/* If the architecture does not have conditional branches that can
add_reg_crossing_jump_notes ();
+ /* Clear bb->aux fields that the above routines were using. */
+ clear_aux_for_blocks ();
+
VEC_free (edge, heap, crossing_edges);
/* ??? FIXME: DF generates the bb info for a block immediately.