static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
static void find_bb_boundaries PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));
+static bool inside_basic_block_p PARAMS ((rtx));
+static bool control_flow_insn_p PARAMS ((rtx));
+
+/* Return true if INSN is something that should be contained inside a
+ basic block, as opposed to a block-boundary marker (barrier, note,
+ or jump-table body). */
+
+static bool
+inside_basic_block_p (insn)
+ rtx insn;
+{
+ switch (GET_CODE (insn))
+ {
+ case CODE_LABEL:
+ /* Avoid creating a basic block for jump tables: a label that is
+ immediately followed by an ADDR_VEC/ADDR_DIFF_VEC jump belongs
+ to the dispatch table, not to a block of real insns. */
+ if (NEXT_INSN (insn)
+ && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
+ || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
+ return false;
+ return true;
+
+ case JUMP_INSN:
+ /* Jump-table bodies (ADDR_VEC/ADDR_DIFF_VEC) live outside
+ basic blocks. */
+ if (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ return false;
+ return true;
+
+ case CALL_INSN:
+ case INSN:
+ return true;
+
+ case BARRIER:
+ case NOTE:
+ return false;
+
+ default:
+ abort ();
+ }
+}
+
+/* Return true if INSN may cause a control flow transfer, so
+ it should be the last insn in a basic block. */
+
+static bool
+control_flow_insn_p (insn)
+ rtx insn;
+{
+ rtx note;
+ switch (GET_CODE (insn))
+ {
+ case NOTE:
+ case CODE_LABEL:
+ return false;
+
+ case JUMP_INSN:
+ /* A jump insn always causes a control transfer, except for
+ tablejumps: ADDR_VEC/ADDR_DIFF_VEC patterns are jump-table
+ bodies, not transfers. */
+ if (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ return false;
+ return true;
+
+ case CALL_INSN:
+ /* A call insn may return to a nonlocal goto handler (no
+ REG_EH_REGION note, or a note with a non-negative region). */
+ if (nonlocal_goto_handler_labels
+ && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
+ || INTVAL (XEXP (note, 0)) >= 0))
+ return true;
+ /* Or it may trap (throw internally). */
+ return can_throw_internal (insn);
+
+ case INSN:
+ return (flag_non_call_exceptions
+ && can_throw_internal (insn));
+
+ case BARRIER:
+ /* It is nonsense to reach a barrier when looking for the
+ end of a basic block, but before dead code is eliminated
+ this may happen. */
+ return false;
+
+ default:
+ abort ();
+ }
+}
/* Count the basic blocks of the function. */
count_basic_blocks (f)
rtx f;
{
- rtx insn;
- RTX_CODE prev_code;
int count = 0;
- int saw_abnormal_edge = 0;
+ bool saw_insn = false;
+ rtx insn;
- prev_code = JUMP_INSN;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
- enum rtx_code code = GET_CODE (insn);
-
- if (code == CODE_LABEL
- || (GET_RTX_CLASS (code) == 'i'
- && (prev_code == JUMP_INSN
- || prev_code == BARRIER
- || saw_abnormal_edge)))
- {
- saw_abnormal_edge = 0;
- count++;
- }
-
- /* Record whether this insn created an edge. */
- if (code == CALL_INSN)
- {
- rtx note;
+ /* Code labels and barriers cause the current basic block to be
+ terminated at the previous real insn. */
- /* If there is a nonlocal goto label and the specified
- region number isn't -1, we have an edge. */
- if (nonlocal_goto_handler_labels
- && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
- || INTVAL (XEXP (note, 0)) >= 0))
- saw_abnormal_edge = 1;
+ if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
+ && saw_insn)
+ count++, saw_insn = false;
- else if (can_throw_internal (insn))
- saw_abnormal_edge = 1;
- }
- else if (flag_non_call_exceptions
- && code == INSN
- && can_throw_internal (insn))
- saw_abnormal_edge = 1;
+ /* Start basic block if needed. */
+ if (!saw_insn && inside_basic_block_p (insn))
+ saw_insn = true;
- if (code != NOTE)
- prev_code = code;
+ /* Control flow insn causes current basic block to be terminated. */
+ if (saw_insn && control_flow_insn_p (insn))
+ count++, saw_insn = false;
}
+ if (saw_insn)
+ count++;
/* The rest of the compiler works a bit smoother when we don't have to
check for the edge case of do-nothing functions with no basic blocks. */
next = NEXT_INSN (insn);
+ if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
+ && head)
+ {
+ create_basic_block_structure (i++, head, end, bb_note);
+ head = end = NULL_RTX;
+ bb_note = NULL_RTX;
+ }
+ if (inside_basic_block_p (insn))
+ {
+ if (head == NULL_RTX)
+ head = insn;
+ end = insn;
+ }
+ if (head && control_flow_insn_p (insn))
+ {
+ create_basic_block_structure (i++, head, end, bb_note);
+ head = end = NULL_RTX;
+ bb_note = NULL_RTX;
+ }
+
switch (code)
{
case NOTE:
}
case CODE_LABEL:
- /* A basic block starts at a label. If we've closed one off due
- to a barrier or some such, no need to do it again. */
- if (head != NULL_RTX)
- {
- create_basic_block_structure (i++, head, end, bb_note);
- bb_note = NULL_RTX;
- }
-
- head = end = insn;
- break;
-
case JUMP_INSN:
- /* A basic block ends at a jump. */
- if (head == NULL_RTX)
- head = insn;
- else
- {
- /* ??? Make a special check for table jumps. The way this
- happens is truly and amazingly gross. We are about to
- create a basic block that contains just a code label and
- an addr*vec jump insn. Worse, an addr_diff_vec creates
- its own natural loop.
-
- Prevent this bit of brain damage, pasting things together
- correctly in make_edges.
-
- The correct solution involves emitting the table directly
- on the tablejump instruction as a note, or JUMP_LABEL. */
-
- if (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
- {
- head = end = NULL;
- n_basic_blocks--;
- break;
- }
- }
- end = insn;
- goto new_bb_inclusive;
-
+ case INSN:
case BARRIER:
- /* A basic block ends at a barrier. It may be that an unconditional
- jump already closed the basic block -- no need to do it again. */
- if (head == NULL_RTX)
- break;
- goto new_bb_exclusive;
+ break;
case CALL_INSN:
- {
- /* Record whether this call created an edge. */
- rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- int region = (note ? INTVAL (XEXP (note, 0)) : 0);
-
- if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- /* Scan each of the alternatives for label refs. */
- lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
- lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
- lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
- /* Record its tail recursion label, if any. */
- if (XEXP (PATTERN (insn), 3) != NULL_RTX)
- trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
- }
-
- /* A basic block ends at a call that can either throw or
- do a non-local goto. */
- if ((nonlocal_goto_handler_labels && region >= 0)
- || can_throw_internal (insn))
- {
- new_bb_inclusive:
- if (head == NULL_RTX)
- head = insn;
- end = insn;
-
- new_bb_exclusive:
- create_basic_block_structure (i++, head, end, bb_note);
- head = end = NULL_RTX;
- bb_note = NULL_RTX;
- break;
- }
- }
- /* Fall through. */
-
- case INSN:
- /* Non-call exceptions generate new blocks just like calls. */
- if (flag_non_call_exceptions && can_throw_internal (insn))
- goto new_bb_inclusive;
-
- if (head == NULL_RTX)
- head = insn;
- end = insn;
+ if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ /* Scan each of the alternatives for label refs. */
+ lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
+ lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
+ lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
+ /* Record its tail recursion label, if any. */
+ if (XEXP (PATTERN (insn), 3) != NULL_RTX)
+ trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
+ }
break;
default:
{
enum rtx_code code = GET_CODE (insn);
- switch (code)
+ /* On code label, split current basic block. */
+ if (code == CODE_LABEL)
{
- case BARRIER:
- if (!flow_transfer_insn)
- abort ();
- break;
-
- /* On code label, split current basic block. */
- case CODE_LABEL:
fallthru = split_block (bb, PREV_INSN (insn));
if (flow_transfer_insn)
bb->end = flow_transfer_insn;
flow_transfer_insn = NULL_RTX;
if (LABEL_ALTERNATE_NAME (insn))
make_edge (ENTRY_BLOCK_PTR, bb, 0);
- break;
-
- case INSN:
- case JUMP_INSN:
- case CALL_INSN:
- /* In case we've previously split an insn that effects a control
- flow transfer, move the block header to proper place. */
- if (flow_transfer_insn)
- {
- fallthru = split_block (bb, PREV_INSN (insn));
- bb->end = flow_transfer_insn;
- bb = fallthru->dest;
- remove_edge (fallthru);
- flow_transfer_insn = NULL_RTX;
- }
-
- /* We need some special care for those expressions. */
- if (code == JUMP_INSN)
- {
- if (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
- abort ();
- flow_transfer_insn = insn;
- }
- else if (code == CALL_INSN)
- {
- rtx note;
- if (nonlocal_goto_handler_labels
- && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
- || INTVAL (XEXP (note, 0)) >= 0))
- flow_transfer_insn = insn;
- else if (can_throw_internal (insn))
- flow_transfer_insn = insn;
- else if (SIBLING_CALL_P (insn))
- flow_transfer_insn = insn;
- else if (find_reg_note (insn, REG_NORETURN, 0))
- flow_transfer_insn = insn;
- }
- else if (flag_non_call_exceptions && can_throw_internal (insn))
- flow_transfer_insn = insn;
- break;
-
- default:
- break;
}
+ /* In case we've previously seen an insn that effects a control
+ flow transfer, split the block. */
+ if (flow_transfer_insn && inside_basic_block_p (insn))
+ {
+ fallthru = split_block (bb, PREV_INSN (insn));
+ bb->end = flow_transfer_insn;
+ bb = fallthru->dest;
+ remove_edge (fallthru);
+ flow_transfer_insn = NULL_RTX;
+ }
+ if (control_flow_insn_p (insn))
+ flow_transfer_insn = insn;
if (insn == end)
break;
insn = NEXT_INSN (insn);
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
-static void delete_null_pointer_checks_1 PARAMS ((varray_type *, unsigned int *,
+static void delete_null_pointer_checks_1 PARAMS ((unsigned int *,
sbitmap *, sbitmap *,
struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
they are not our responsibility to free. */
static void
-delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
+delete_null_pointer_checks_1 (block_reg, nonnull_avin,
nonnull_avout, npi)
- varray_type *delete_list;
unsigned int *block_reg;
sbitmap *nonnull_avin;
sbitmap *nonnull_avout;
LABEL_NUSES (JUMP_LABEL (new_jump))++;
emit_barrier_after (new_jump);
}
- if (!*delete_list)
- VARRAY_RTX_INIT (*delete_list, 10, "delete_list");
- VARRAY_PUSH_RTX (*delete_list, last_insn);
+ delete_insn (last_insn);
if (compare_and_branch == 2)
- VARRAY_PUSH_RTX (*delete_list, earliest);
+ delete_insn (earliest);
+ purge_dead_edges (BASIC_BLOCK (bb));
/* Don't check this block again. (Note that BLOCK_END is
invalid here; we deleted the last instruction in the
{
sbitmap *nonnull_avin, *nonnull_avout;
unsigned int *block_reg;
- varray_type delete_list = NULL;
int bb;
int reg;
int regs_per_pass;
int max_reg;
- unsigned int i;
struct null_pointer_info npi;
/* If we have only a single block, then there's nothing to do. */
{
npi.min_reg = reg;
npi.max_reg = MIN (reg + regs_per_pass, max_reg);
- delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
+ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
nonnull_avout, &npi);
}
- /* Now delete the instructions all at once. This breaks the CFG. */
- if (delete_list)
- {
- for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
- delete_related_insns (VARRAY_RTX (delete_list, i));
- VARRAY_FREE (delete_list);
- }
-
/* Free the table of registers compared at the end of every block. */
free (block_reg);