static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
-static void group_case_labels (void);
-static void cleanup_dead_labels (void);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_cond_expr (basic_block, tree);
/* Initialize the basic block array. */
init_flow ();
+ profile_status = PROFILE_ABSENT;
n_basic_blocks = 0;
last_basic_block = 0;
VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
PROP_cfg, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_verify_stmts /* todo_flags_finish */
+ TODO_verify_stmts, /* todo_flags_finish */
+ 0 /* letter */
};
/* Search the CFG for any computed gotos. If found, factor them to a
create_block_annotation (basic_block bb)
{
/* Verify that the tree_annotations field is clear. */
- if (bb->tree_annotations)
- abort ();
+ gcc_assert (!bb->tree_annotations);
bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
}
{
basic_block bb;
- if (e)
- abort ();
+ gcc_assert (!e);
/* Create and initialize a new basic block. */
bb = alloc_block ();
/* We do not care about fake edges, so remove any that the CFG
builder inserted for completeness. */
- remove_fake_edges ();
+ remove_fake_exit_edges ();
/* Clean up the graph and warn for unreachable code. */
cleanup_tree_cfg ();
make_ctrl_stmt_edges (basic_block bb)
{
tree last = last_stmt (bb);
- tree first = first_stmt (bb);
-
-#if defined ENABLE_CHECKING
- if (last == NULL_TREE)
- abort();
-#endif
-
- if (TREE_CODE (first) == LABEL_EXPR
- && DECL_NONLOCAL (LABEL_EXPR_LABEL (first)))
- make_edge (ENTRY_BLOCK_PTR, bb, EDGE_ABNORMAL);
+ gcc_assert (last);
switch (TREE_CODE (last))
{
case GOTO_EXPR:
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
static void
make_exit_edges (basic_block bb)
{
- tree last = last_stmt (bb);
-
- if (last == NULL_TREE)
- abort ();
+ tree last = last_stmt (bb), op;
+ gcc_assert (last);
switch (TREE_CODE (last))
{
case CALL_EXPR:
/* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
may have an abnormal edge. Search the RHS for this case and
create any required edges. */
- if (TREE_CODE (TREE_OPERAND (last, 1)) == CALL_EXPR
- && TREE_SIDE_EFFECTS (TREE_OPERAND (last, 1))
+ op = get_call_expr_in (last);
+ if (op && TREE_SIDE_EFFECTS (op)
&& current_function_has_nonlocal_label)
make_goto_expr_edges (bb);
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
basic_block then_bb, else_bb;
tree then_label, else_label;
-#if defined ENABLE_CHECKING
- if (entry == NULL_TREE || TREE_CODE (entry) != COND_EXPR)
- abort ();
-#endif
+ gcc_assert (entry);
+ gcc_assert (TREE_CODE (entry) == COND_EXPR);
/* Entry basic blocks for each component. */
then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
if (simple_goto_p (goto_t))
{
edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
+#ifdef USE_MAPPED_LOCATION
+ e->goto_locus = EXPR_LOCATION (goto_t);
+#else
e->goto_locus = EXPR_LOCUS (goto_t);
+#endif
bsi_remove (&last);
return;
}
/* Remove unreachable blocks and other miscellaneous clean up work. */
-void
+bool
cleanup_tree_cfg (void)
{
bool something_changed = true;
+ bool retval = false;
timevar_push (TV_TREE_CLEANUP_CFG);
while (something_changed)
{
something_changed = cleanup_control_flow ();
- something_changed |= thread_jumps ();
something_changed |= delete_unreachable_blocks ();
+ something_changed |= thread_jumps ();
+ retval |= something_changed;
}
/* Merging the blocks creates no new opportunities for the other
verify_flow_info ();
#endif
timevar_pop (TV_TREE_CLEANUP_CFG);
+ return retval;
}
tree old_label = get_eh_region_tree_label (region);
if (old_label)
{
- tree new_label = label_for_bb[label_to_block (old_label)->index];
+ tree new_label;
+ basic_block bb = label_to_block (old_label);
+
+ /* ??? After optimizing, there may be EH regions with labels
+ that have already been removed from the function body, so
+ there is no basic block for them. */
+ if (! bb)
+ return;
+
+ new_label = label_for_bb[bb->index];
set_eh_region_tree_label (region, new_label);
}
}
2) Redirect all references to labels to the leading labels.
3) Cleanup all useless labels. */
-static void
+void
cleanup_dead_labels (void)
{
basic_block bb;
same label.
Eg. three separate entries 1: 2: 3: become one entry 1..3: */
-static void
+void
group_case_labels (void)
{
basic_block bb;
tree labels = SWITCH_LABELS (stmt);
int old_size = TREE_VEC_LENGTH (labels);
int i, j, new_size = old_size;
+ tree default_label = TREE_VEC_ELT (labels, old_size - 1);
/* Look for possible opportunities to merge cases.
Ignore the last element of the label vector because it
tree base_case, base_label, base_high, type;
base_case = TREE_VEC_ELT (labels, i);
- if (! base_case)
- abort ();
+ gcc_assert (base_case);
+ base_label = CASE_LABEL (base_case);
+
+ /* Discard cases that have the same destination as the
+ default case. */
+ if (base_label == default_label)
+ {
+ TREE_VEC_ELT (labels, i) = NULL_TREE;
+ i++;
+ continue;
+ }
type = TREE_TYPE (CASE_LOW (base_case));
- base_label = CASE_LABEL (base_case);
base_high = CASE_HIGH (base_case) ?
CASE_HIGH (base_case) : CASE_LOW (base_case);
/* Ensure that B follows A. */
move_block_after (b, a);
- if (!(a->succ->flags & EDGE_FALLTHRU))
- abort ();
-
- if (last_stmt (a)
- && stmt_ends_bb_p (last_stmt (a)))
- abort ();
+ gcc_assert (a->succ->flags & EDGE_FALLTHRU);
+ gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
/* Remove labels from B and set bb_for_stmt to A for other statements. */
for (bsi = bsi_start (b); !bsi_end_p (bsi);)
static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
- if (EXPR_LOCUS (stmt))
+ if (EXPR_HAS_LOCATION (stmt))
{
- warning ("%Hwill never be executed", EXPR_LOCUS (stmt));
+ location_t loc = EXPR_LOCATION (stmt);
+ warning ("%Hwill never be executed", &loc);
return true;
}
static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
- tree t = *tp;
+ tree t = *tp, op;
switch (TREE_CODE (t))
{
case MODIFY_EXPR:
data->last_goto = NULL;
fold_stmt (tp);
- if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
+ op = get_call_expr_in (t);
+ if (op)
{
- update_call_expr_flags (TREE_OPERAND (t, 1));
- notice_special_calls (TREE_OPERAND (t, 1));
+ update_call_expr_flags (op);
+ notice_special_calls (op);
}
if (tree_could_throw_p (t))
data->may_throw = true;
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
continue;
}
- /* Invalidate the var if we encounter something that could modify it. */
+ /* Invalidate the var if we encounter something that could modify it.
+ Likewise for the value it was previously set to. Note that we only
+ consider values that are either a VAR_DECL or PARM_DECL so we
+ can test for conflict very simply. */
if (TREE_CODE (stmt) == ASM_EXPR
- || TREE_CODE (stmt) == VA_ARG_EXPR
|| (TREE_CODE (stmt) == MODIFY_EXPR
&& (TREE_OPERAND (stmt, 0) == var
- || TREE_OPERAND (stmt, 0) == val
- || TREE_CODE (TREE_OPERAND (stmt, 1)) == VA_ARG_EXPR)))
+ || TREE_OPERAND (stmt, 0) == val)))
return;
bsi_next (&bsi);
remove_bb (basic_block bb)
{
block_stmt_iterator i;
- location_t *loc = NULL;
+ source_locus loc = 0;
if (dump_file)
{
jump threading, thus resulting in bogus warnings. Not great,
since this way we lose warnings for gotos in the original
program that are indeed unreachable. */
- if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_LOCUS (stmt) && !loc)
+ if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
+#ifdef USE_MAPPED_LOCATION
+ loc = EXPR_LOCATION (stmt);
+#else
loc = EXPR_LOCUS (stmt);
+#endif
}
/* If requested, give a warning that the first statement in the
loop above, so the last statement we process is the first statement
in the block. */
if (warn_notreached && loc)
+#ifdef USE_MAPPED_LOCATION
+ warning ("%Hwill never be executed", &loc);
+#else
warning ("%Hwill never be executed", loc);
+#endif
remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
taken_edge = find_taken_edge (bb, val);
}
-/* Given a control block BB and a constant value VAL, return the edge that
- will be taken out of the block. If VAL does not match a unique edge,
- NULL is returned. */
+/* Given a control block BB and a predicate VAL, return the edge that
+ will be taken out of the block. If VAL does not match a unique
+ edge, NULL is returned. */
edge
find_taken_edge (basic_block bb, tree val)
stmt = last_stmt (bb);
-#if defined ENABLE_CHECKING
- if (stmt == NULL_TREE || !is_ctrl_stmt (stmt))
- abort ();
-#endif
+ gcc_assert (stmt);
+ gcc_assert (is_ctrl_stmt (stmt));
+
+ /* If VAL is a predicate of the form N RELOP N, where N is an
+ SSA_NAME, we can always determine its truth value (except when
+ doing floating point comparisons that may involve NaNs). */
+ if (val
+ && TREE_CODE_CLASS (TREE_CODE (val)) == '<'
+ && TREE_OPERAND (val, 0) == TREE_OPERAND (val, 1)
+ && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME
+ && (TREE_CODE (TREE_TYPE (TREE_OPERAND (val, 0))) != REAL_TYPE
+ || !HONOR_NANS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (val, 0))))))
+ {
+ enum tree_code code = TREE_CODE (val);
+
+ if (code == EQ_EXPR || code == LE_EXPR || code == GE_EXPR)
+ val = boolean_true_node;
+ else if (code == LT_EXPR || code == GT_EXPR || code == NE_EXPR)
+ val = boolean_false_node;
+ }
/* If VAL is not a constant, we can't determine which edge might
be taken. */
dest_bb = label_to_block (CASE_LABEL (taken_case));
e = find_edge (bb, dest_bb);
- if (!e)
- abort ();
+ gcc_assert (e);
return e;
}
n1 = phi_arg_from_edge (phi, e1);
n2 = phi_arg_from_edge (phi, e2);
-#ifdef ENABLE_CHECKING
- if (n1 < 0 || n2 < 0)
- abort ();
-#endif
+ gcc_assert (n1 >= 0);
+ gcc_assert (n2 >= 0);
val1 = PHI_ARG_DEF (phi, n1);
val2 = PHI_ARG_DEF (phi, n2);
}
-/* Computing the Dominance Frontier:
-
- As described in Morgan, section 3.5, this may be done simply by
- walking the dominator tree bottom-up, computing the frontier for
- the children before the parent. When considering a block B,
- there are two cases:
-
- (1) A flow graph edge leaving B that does not lead to a child
- of B in the dominator tree must be a block that is either equal
- to B or not dominated by B. Such blocks belong in the frontier
- of B.
-
- (2) Consider a block X in the frontier of one of the children C
- of B. If X is not equal to B and is not dominated by B, it
- is in the frontier of B. */
-
-static void
-compute_dominance_frontiers_1 (bitmap *frontiers, basic_block bb, sbitmap done)
-{
- edge e;
- basic_block c;
-
- SET_BIT (done, bb->index);
-
- /* Do the frontier of the children first. Not all children in the
- dominator tree (blocks dominated by this one) are children in the
- CFG, so check all blocks. */
- for (c = first_dom_son (CDI_DOMINATORS, bb);
- c;
- c = next_dom_son (CDI_DOMINATORS, c))
- {
- if (! TEST_BIT (done, c->index))
- compute_dominance_frontiers_1 (frontiers, c, done);
- }
-
- /* Find blocks conforming to rule (1) above. */
- for (e = bb->succ; e; e = e->succ_next)
- {
- if (e->dest == EXIT_BLOCK_PTR)
- continue;
- if (get_immediate_dominator (CDI_DOMINATORS, e->dest) != bb)
- bitmap_set_bit (frontiers[bb->index], e->dest->index);
- }
-
- /* Find blocks conforming to rule (2). */
- for (c = first_dom_son (CDI_DOMINATORS, bb);
- c;
- c = next_dom_son (CDI_DOMINATORS, c))
- {
- int x;
-
- EXECUTE_IF_SET_IN_BITMAP (frontiers[c->index], 0, x,
- {
- if (get_immediate_dominator (CDI_DOMINATORS, BASIC_BLOCK (x)) != bb)
- bitmap_set_bit (frontiers[bb->index], x);
- });
- }
-}
-
-
-void
-compute_dominance_frontiers (bitmap *frontiers)
-{
- sbitmap done = sbitmap_alloc (last_basic_block);
-
- timevar_push (TV_DOM_FRONTIERS);
-
- sbitmap_zero (done);
-
- compute_dominance_frontiers_1 (frontiers, ENTRY_BLOCK_PTR->succ->dest, done);
-
- sbitmap_free (done);
-
- timevar_pop (TV_DOM_FRONTIERS);
-}
-
-
-
/*---------------------------------------------------------------------------
Debugging functions
---------------------------------------------------------------------------*/
{
static long max_num_merged_labels = 0;
unsigned long size, total = 0;
- long n_edges;
+ int n_edges;
basic_block bb;
const char * const fmt_str = "%-30s%-13s%12s\n";
- const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
+ const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
const char * const fmt_str_3 = "%-43s%11lu%c\n";
const char *funcname
= lang_hooks.decl_printable_name (current_function_decl, 2);
size = n_basic_blocks * sizeof (struct basic_block_def);
total += size;
- fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks, SCALE (size),
- LABEL (size));
+ fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
+ SCALE (size), LABEL (size));
n_edges = 0;
FOR_EACH_BB (bb)
bool
is_ctrl_altering_stmt (tree t)
{
- tree call = t;
-
-#if defined ENABLE_CHECKING
- if (t == NULL)
- abort ();
-#endif
+ tree call;
- switch (TREE_CODE (t))
+ gcc_assert (t);
+ call = get_call_expr_in (t);
+ if (call)
{
- case MODIFY_EXPR:
- /* A MODIFY_EXPR with a rhs of a call has the characteristics
- of the call. */
- call = TREE_OPERAND (t, 1);
- if (TREE_CODE (call) != CALL_EXPR)
- break;
- /* FALLTHRU */
-
- case CALL_EXPR:
/* A non-pure/const CALL_EXPR alters flow control if the current
function has nonlocal labels. */
- if (TREE_SIDE_EFFECTS (t)
- && current_function_has_nonlocal_label)
+ if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
return true;
/* A CALL_EXPR also alters control flow if it does not return. */
if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP))
return true;
- break;
-
- default:
- return false;
}
/* If a statement can throw, it alters control flow. */
bool
simple_goto_p (tree expr)
{
- return (TREE_CODE (expr) == GOTO_EXPR
- && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL
- && (decl_function_context (GOTO_DESTINATION (expr))
- == current_function_decl));
+ return (TREE_CODE (expr) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
}
else if (e->flags & EDGE_FALSE_VALUE)
COND_EXPR_ELSE (stmt) = build_empty_stmt ();
else
- abort ();
+ gcc_unreachable ();
e->flags |= EDGE_FALLTHRU;
}
{
/* Remove the RETURN_EXPR if we may fall though to the exit
instead. */
- if (!bb->succ
- || bb->succ->succ_next
- || bb->succ->dest != EXIT_BLOCK_PTR)
- abort ();
+ gcc_assert (bb->succ);
+ gcc_assert (!bb->succ->succ_next);
+ gcc_assert (bb->succ->dest == EXIT_BLOCK_PTR);
if (bb->next_bb == EXIT_BLOCK_PTR
&& !TREE_OPERAND (stmt, 0))
if (!e || e->dest == bb->next_bb)
continue;
- if (e->dest == EXIT_BLOCK_PTR)
- abort ();
-
+ gcc_assert (e->dest != EXIT_BLOCK_PTR);
label = tree_block_label (e->dest);
stmt = build1 (GOTO_EXPR, void_type_node, label);
+#ifdef USE_MAPPED_LOCATION
+ SET_EXPR_LOCATION (stmt, e->goto_locus);
+#else
SET_EXPR_LOCUS (stmt, e->goto_locus);
+#endif
bsi_insert_after (&last, stmt, BSI_NEW_STMT);
e->flags &= ~EDGE_FALLTHRU;
}
VARRAY_GROW (label_to_block_map, 3 * uid / 2);
}
else
- {
-#ifdef ENABLE_CHECKING
- /* We're moving an existing label. Make sure that we've
- removed it from the old block. */
- if (bb && VARRAY_BB (label_to_block_map, uid))
- abort ();
-#endif
- }
+ /* We're moving an existing label. Make sure that we've
+ removed it from the old block. */
+ gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
VARRAY_BB (label_to_block_map, uid) = bb;
}
}
}
+/* Return a block statement iterator pointing to STMT inside the basic
+   block that contains it.  Performs a linear scan of the block's
+   statement list, so this is O(n) in the block size; STMT must
+   actually be a member of its block or we abort via gcc_unreachable.  */
+
+block_stmt_iterator
+stmt_for_bsi (tree stmt)
+{
+  block_stmt_iterator bsi;
+
+  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
+    if (bsi_stmt (bsi) == stmt)
+      return bsi;
+
+  /* STMT was not found in its own block -- caller bug.  */
+  gcc_unreachable ();
+}
/* Insert statement (or statement list) T before the statement
pointed-to by iterator I. M specifies how to update iterator I
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
set_bb_for_stmt (t, i->bb);
- modify_stmt (t);
tsi_link_before (&i->tsi, t, m);
+ modify_stmt (t);
}
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
set_bb_for_stmt (t, i->bb);
- modify_stmt (t);
tsi_link_after (&i->tsi, t, m);
+ modify_stmt (t);
}
{
tree t = bsi_stmt (*i);
set_bb_for_stmt (t, NULL);
- modify_stmt (t);
tsi_delink (&i->tsi);
}
In all cases, the returned *BSI points to the correct location. The
return value is true if insertion should be done after the location,
- or false if it should be done before the location. */
+ or false if it should be done before the location. If new basic block
+ has to be created, it is stored in *NEW_BB. */
static bool
-tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi)
+tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
+ basic_block *new_bb)
{
basic_block dest, src;
tree tmp;
tree op = TREE_OPERAND (tmp, 0);
if (!is_gimple_val (op))
{
- if (TREE_CODE (op) != MODIFY_EXPR)
- abort ();
+ gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
bsi_insert_before (bsi, op, BSI_NEW_STMT);
TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
}
/* Otherwise, create a new basic block, and split this edge. */
dest = split_edge (e);
+ if (new_bb)
+ *new_bb = dest;
e = dest->pred;
goto restart;
}
PENDING_STMT (e) = NULL_TREE;
- if (tree_find_edge_insert_loc (e, &bsi))
+ if (tree_find_edge_insert_loc (e, &bsi, NULL))
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
else
bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
append_to_statement_list (stmt, &PENDING_STMT (e));
}
+/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If new block has to
+ be created, it is returned. */
-/* Specialized edge insertion for SSA-PRE. FIXME: This should
- probably disappear. The only reason it's here is because PRE needs
- the call to tree_find_edge_insert_loc(). */
-
-void pre_insert_on_edge (edge e, tree stmt);
-
-void
-pre_insert_on_edge (edge e, tree stmt)
+basic_block
+bsi_insert_on_edge_immediate (edge e, tree stmt)
{
block_stmt_iterator bsi;
+ basic_block new_bb = NULL;
- if (PENDING_STMT (e))
- abort ();
+ gcc_assert (!PENDING_STMT (e));
- if (tree_find_edge_insert_loc (e, &bsi))
+ if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
else
bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
-}
+ return new_bb;
+}
/*---------------------------------------------------------------------------
Tree specific functions for CFG manipulation
int i, num_elem;
/* Abnormal edges cannot be split. */
- if (edge_in->flags & EDGE_ABNORMAL)
- abort ();
+ gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
src = edge_in->src;
dest = edge_in->dest;
after_bb = edge_in->src;
new_bb = create_empty_bb (after_bb);
+ new_bb->frequency = EDGE_FREQUENCY (edge_in);
+ new_bb->count = edge_in->count;
new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
+ new_edge->probability = REG_BR_PROB_BASE;
+ new_edge->count = edge_in->count;
/* Find all the PHI arguments on the original edge, and change them to
the new edge. Do it before redirection, so that the argument does not
}
}
- if (!redirect_edge_and_branch (edge_in, new_bb))
- abort ();
-
- if (PENDING_STMT (edge_in))
- abort ();
+ e = redirect_edge_and_branch (edge_in, new_bb);
+ gcc_assert (e);
+ gcc_assert (!PENDING_STMT (edge_in));
return new_bb;
}
if (TYPE_P (t))
*walk_subtrees = 0;
- /* Check operand N for being valid GIMPLE and give error MSG if not. */
+ /* Check operand N for being valid GIMPLE and give error MSG if not.
+ We check for constants explicitly since they are not considered
+ gimple invariants if they overflowed. */
#define CHECK_OP(N, MSG) \
do { if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, N))) != 'c' \
&& !is_gimple_val (TREE_OPERAND (t, N))) \
case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
case BIT_AND_EXPR:
- x = TREE_OPERAND (t, 0);
- /* We check for constants explicitly since they are not considered
- gimple invariants if they overflowed. */
- if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c'
- && !is_gimple_val (x))
- {
- error ("Invalid operand to binary operator");
- return x;
- }
- x = TREE_OPERAND (t, 1);
- /* We check for constants explicitly since they are not considered
- gimple invariants if they overflowed. */
- if (TREE_CODE_CLASS (TREE_CODE (x)) != 'c'
- && !is_gimple_val (x))
- {
- error ("Invalid operand to binary operator");
- return x;
- }
+ CHECK_OP (0, "Invalid operand to binary operator");
+ CHECK_OP (1, "Invalid operand to binary operator");
break;
default:
tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
basic_block label_bb = label_to_block (lab);
- if (label_bb->aux && label_bb->aux != (void *)1)
- abort ();
+ gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
label_bb->aux = (void *)1;
}
thread_jumps (void)
{
edge e, next, last, old;
- basic_block bb, dest, tmp;
+ basic_block bb, dest, tmp, old_dest, dom;
tree phi;
int arg;
bool retval = false;
forwardable. */
for (e = bb->succ; e; e = next)
{
+ int freq;
+ gcov_type count;
next = e->succ_next;
/* If the edge is abnormal or its destination is not
|| !tree_forwarder_block_p (e->dest))
continue;
+ count = e->count;
+ freq = EDGE_FREQUENCY (e);
+
/* Now walk through as many forwarder block as possible to
find the ultimate destination we want to thread our jump
to. */
break;
bb_ann (dest)->forwardable = 0;
+ dest->frequency -= freq;
+ if (dest->frequency < 0)
+ dest->frequency = 0;
+ dest->count -= count;
+ if (dest->count < 0)
+ dest->count = 0;
+ dest->succ->count -= count;
+ if (dest->succ->count < 0)
+ dest->succ->count = 0;
}
/* Reset the forwardable marks to 1. */
/* Perform the redirection. */
retval = true;
+ old_dest = e->dest;
e = redirect_edge_and_branch (e, dest);
- /* TODO -- updating dominators in this case is simple. */
- free_dominance_info (CDI_DOMINATORS);
-
if (!old)
{
/* Update PHI nodes. We know that the new argument should
for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
{
arg = phi_arg_from_edge (phi, last);
- if (arg < 0)
- abort ();
+ gcc_assert (arg >= 0);
add_phi_arg (&phi, PHI_ARG_DEF (phi, arg), e);
}
}
+
+ /* Update the dominators. */
+ if (dom_computed[CDI_DOMINATORS] >= DOM_CONS_OK)
+ {
+ /* Remove the unreachable blocks (observe that if all blocks
+ were reachable before, only those in the path we threaded
+ over and did not have any predecessor outside of the path
+ become unreachable). */
+ for (; old_dest != dest; old_dest = tmp)
+ {
+ tmp = old_dest->succ->dest;
+
+ if (old_dest->pred)
+ break;
+
+ delete_basic_block (old_dest);
+ }
+ /* If the dominator of the destination was in the path, set its
+ dominator to the start of the redirected edge. */
+ if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
+ set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);
+
+ /* Now proceed like if we forwarded just over one edge at a time.
+ Algorithm for forwarding over edge A --> B then is
+
+ if (idom (B) == A)
+ idom (B) = idom (A);
+ recount_idom (A); */
+
+ for (; old_dest != dest; old_dest = tmp)
+ {
+ tmp = old_dest->succ->dest;
+
+ if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest)
+ {
+ dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
+ set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
+ }
+
+ dom = recount_dominator (CDI_DOMINATORS, old_dest);
+ set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
+ }
+ }
}
/* Reset the forwardable bit on our block since it's no longer in
case GOTO_EXPR:
/* No non-abnormal edges should lead from a non-simple goto, and
simple ones should be represented implicitly. */
- abort ();
+ gcc_unreachable ();
case SWITCH_EXPR:
{
default:
/* Otherwise it must be a fallthru edge, and we don't need to
do anything besides redirecting it. */
- if (!(e->flags & EDGE_FALLTHRU))
- abort ();
+ gcc_assert (e->flags & EDGE_FALLTHRU);
break;
}
tree_redirect_edge_and_branch_force (edge e, basic_block dest)
{
e = tree_redirect_edge_and_branch (e, dest);
- if (!e)
- abort ();
+ gcc_assert (e);
return NULL;
}
{
basic_block new_bb;
block_stmt_iterator bsi, bsi_tgt;
+ tree phi, val;
+ ssa_op_iter op_iter;
new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
+
+ for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
+ {
+ mark_for_rewrite (PHI_RESULT (phi));
+ }
+
bsi_tgt = bsi_start (new_bb);
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
tree stmt = bsi_stmt (bsi);
+ tree copy;
if (TREE_CODE (stmt) == LABEL_EXPR)
continue;
- bsi_insert_after (&bsi_tgt, unshare_expr (stmt), BSI_NEW_STMT);
+ /* Record the definitions. */
+ get_stmt_operands (stmt);
+
+ FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
+ mark_for_rewrite (val);
+
+ copy = unshare_expr (stmt);
+
+ /* Copy also the virtual operands. */
+ get_stmt_ann (copy);
+ copy_virtual_operands (copy, stmt);
+
+ bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
}
return new_bb;
if (basic_block_info)
{
/* Make a CFG based dump. */
+ check_bb_profile (ENTRY_BLOCK_PTR, file);
if (!ignore_topmost_bind)
fprintf (file, "{\n");
dump_generic_bb (file, bb, 2, flags);
fprintf (file, "}\n");
+ check_bb_profile (EXIT_BLOCK_PTR, file);
}
else
{
tree_block_ends_with_call_p (basic_block bb)
{
block_stmt_iterator bsi = bsi_last (bb);
- tree t = tsi_stmt (bsi.tsi);
-
- if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
- t = TREE_OPERAND (t, 0);
-
- if (TREE_CODE (t) == MODIFY_EXPR)
- t = TREE_OPERAND (t, 1);
-
- return TREE_CODE (t) == CALL_EXPR;
+ return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}
static bool
need_fake_edge_p (tree t)
{
- if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
- t = TREE_OPERAND (t, 0);
-
- if (TREE_CODE (t) == MODIFY_EXPR)
- t = TREE_OPERAND (t, 1);
+ tree call;
/* NORETURN and LONGJMP calls already have an edge to exit.
CONST, PURE and ALWAYS_RETURN calls do not need one.
figured out from the RTL in mark_constant_function, and
the counter incrementation code from -fprofile-arcs
leads to different results from -fbranch-probabilities. */
- if (TREE_CODE (t) == CALL_EXPR
- && !(call_expr_flags (t) &
- (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
+ call = get_call_expr_in (t);
+ if (call
+ && !(call_expr_flags (call) &
+ (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
return true;
if (TREE_CODE (t) == ASM_EXPR
#ifdef ENABLE_CHECKING
if (stmt == last_stmt)
for (e = bb->succ; e; e = e->succ_next)
- if (e->dest == EXIT_BLOCK_PTR)
- abort ();
+ gcc_assert (e->dest != EXIT_BLOCK_PTR);
#endif
/* Note that the following may create a new basic block
PROP_no_crit_edges, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
+
+\f
+/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
+   a temporary, make sure and register it to be renamed if necessary,
+   and finally return the temporary.  Put the statements to compute
+   EXP before the current statement in BSI.
+
+   NOTE(review): the source location is copied with SET_EXPR_LOCUS /
+   EXPR_LOCUS unconditionally, whereas other hunks in this patch guard
+   such copies with #ifdef USE_MAPPED_LOCATION -- confirm this builds
+   in the mapped-location configuration.  */
+
+tree
+gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
+{
+  tree t, new_stmt, orig_stmt;
+
+  /* Already a GIMPLE value; no temporary needed.  */
+  if (is_gimple_val (exp))
+    return exp;
+
+  /* Create a fresh temporary T (registered for renaming) and the
+     assignment T = EXP.  */
+  t = make_rename_temp (type, NULL);
+  new_stmt = build (MODIFY_EXPR, type, t, exp);
+
+  /* Give the new assignment the same source location and lexical
+     block as the statement at the insertion point.  */
+  orig_stmt = bsi_stmt (*bsi);
+  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
+  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
+
+  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
+
+  return t;
+}
+
+/* Build a ternary operation and gimplify it.  Emit code before BSI.
+   Return the gimple_val holding the result.  */
+
+tree
+gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
+		 tree type, tree a, tree b, tree c)
+{
+  tree ret;
+
+  /* Fold eagerly: the expression may simplify to a constant or an
+     existing value, in which case gimplify_val emits no statement.  */
+  ret = fold (build3 (code, type, a, b, c));
+  STRIP_NOPS (ret);
+
+  return gimplify_val (bsi, type, ret);
+}
+
+/* Build a binary operation and gimplify it.  Emit code before BSI.
+   Return the gimple_val holding the result.  */
+
+tree
+gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
+		 tree type, tree a, tree b)
+{
+  tree ret;
+
+  /* Fold eagerly: the expression may simplify to a constant or an
+     existing value, in which case gimplify_val emits no statement.  */
+  ret = fold (build2 (code, type, a, b));
+  STRIP_NOPS (ret);
+
+  return gimplify_val (bsi, type, ret);
+}
+
+/* Build a unary operation and gimplify it.  Emit code before BSI.
+   Return the gimple_val holding the result.  */
+
+tree
+gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
+		 tree a)
+{
+  tree ret;
+
+  /* Fold eagerly: the expression may simplify to a constant or an
+     existing value, in which case gimplify_val emits no statement.  */
+  ret = fold (build1 (code, type, a));
+  STRIP_NOPS (ret);
+
+  return gimplify_val (bsi, type, ret);
+}
+
+
\f
/* Emit return warnings. */
static void
execute_warn_function_return (void)
{
+#ifdef USE_MAPPED_LOCATION
+ source_location location;
+#else
location_t *locus;
+#endif
tree last;
edge e;
if (TREE_THIS_VOLATILE (cfun->decl)
&& EXIT_BLOCK_PTR->pred != NULL)
{
+#ifdef USE_MAPPED_LOCATION
+ location = UNKNOWN_LOCATION;
+#else
locus = NULL;
+#endif
for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
{
last = last_stmt (e->src);
if (TREE_CODE (last) == RETURN_EXPR
+#ifdef USE_MAPPED_LOCATION
+ && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
+#else
&& (locus = EXPR_LOCUS (last)) != NULL)
+#endif
break;
}
+#ifdef USE_MAPPED_LOCATION
+ if (location == UNKNOWN_LOCATION)
+ location = cfun->function_end_locus;
+ warning ("%H`noreturn' function does return", &location);
+#else
if (!locus)
locus = &cfun->function_end_locus;
warning ("%H`noreturn' function does return", locus);
+#endif
}
/* If we see "return;" in some basic block, then we do reach the end
if (TREE_CODE (last) == RETURN_EXPR
&& TREE_OPERAND (last, 0) == NULL)
{
+#ifdef USE_MAPPED_LOCATION
+ location = EXPR_LOCATION (last);
+ if (location == UNKNOWN_LOCATION)
+ location = cfun->function_end_locus;
+ warning ("%Hcontrol reaches end of non-void function", &location);
+#else
locus = EXPR_LOCUS (last);
if (!locus)
locus = &cfun->function_end_locus;
warning ("%Hcontrol reaches end of non-void function", locus);
+#endif
break;
}
}
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0 /* todo_flags_finish */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
};
#include "gt-tree-cfg.h"