/* Control flow functions for trees.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
-#include "hashtab.h"
#include "tree-ssa-propagate.h"
+#include "value-prof.h"
+#include "pointer-set.h"
/* This file contains functions for building the Control Flow Graph (CFG)
for a function tree. */
more persistent. The key is getting notification of changes to
the CFG (particularly edge removal, creation and redirection). */
-struct edge_to_cases_elt
-{
- /* The edge itself. Necessary for hashing and equality tests. */
- edge e;
-
- /* The case labels associated with this edge. We link these up via
- their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
- when we destroy the hash table. This prevents problems when copying
- SWITCH_EXPRs. */
- tree case_labels;
-};
-
-static htab_t edge_to_cases;
+static struct pointer_map_t *edge_to_cases;
/* CFG statistics. */
struct cfg_stats_d
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
-static void make_omp_sections_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static void tree_cfg2vcg (FILE *);
+static inline void change_bb_for_stmt (tree t, basic_block bb);
/* Flowgraph optimization and cleanup. */
static void tree_merge_blocks (basic_block, basic_block);
n_basic_blocks = NUM_FIXED_BLOCKS;
last_basic_block = NUM_FIXED_BLOCKS;
basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
- VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
- memset (VEC_address (basic_block, basic_block_info), 0,
- sizeof (basic_block) * initial_cfg_capacity);
+ VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
+ initial_cfg_capacity);
/* Build a mapping of labels to their associated blocks. */
label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
- VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
- memset (VEC_address (basic_block, label_to_block_map),
- 0, sizeof (basic_block) * initial_cfg_capacity);
+ VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
+ initial_cfg_capacity);
SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
/* Adjust the size of the array. */
if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
- {
- size_t old_size = VEC_length (basic_block, basic_block_info);
- basic_block *p;
- VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
- p = VEC_address (basic_block, basic_block_info);
- memset (&p[old_size], 0,
- sizeof (basic_block) * (n_basic_blocks - old_size));
- }
+ VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
/* To speed up statement iterator walks, we first purge dead labels. */
cleanup_dead_labels ();
/* Create the edges of the flowgraph. */
make_edges ();
+ cleanup_dead_labels ();
/* Debugging dumps. */
PROP_cfg, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_verify_stmts, /* todo_flags_finish */
+ TODO_verify_stmts | TODO_cleanup_cfg, /* todo_flags_finish */
0 /* letter */
};
-/* Search the CFG for any computed gotos. If found, factor them to a
+/* Search the CFG for any computed gotos. If found, factor them to a
common computed goto site. Also record the location of that site so
- that we can un-factor the gotos after we have converted back to
+ that we can un-factor the gotos after we have converted back to
normal form. */
static void
/* We know there are one or more computed gotos in this function.
Examine the last statement in each basic block to see if the block
ends with a computed goto. */
-
+
FOR_EACH_BB (bb)
{
block_stmt_iterator bsi = bsi_last (bb);
}
/* Copy the original computed goto's destination into VAR. */
- assignment = build2 (MODIFY_EXPR, ptr_type_node,
- var, GOTO_DESTINATION (last));
+ assignment = build_gimple_modify_stmt (var,
+ GOTO_DESTINATION (last));
bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
/* And re-vector the computed goto to the new destination. */
bb->index = last_basic_block;
bb->flags = BB_NEW;
- bb->stmt_list = h ? (tree) h : alloc_stmt_list ();
+ bb->il.tree = GGC_CNEW (struct tree_bb_info);
+ set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
/* Add the new block to the linked list of blocks. */
link_block (bb, after);
/* Grow the basic block array if needed. */
if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
{
- size_t old_size = VEC_length (basic_block, basic_block_info);
size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
- basic_block *p;
- VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
- p = VEC_address (basic_block, basic_block_info);
- memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
+ VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
}
/* Add the newly created block to the array. */
if (stmt
&& TREE_CODE (stmt) == COND_EXPR)
{
- tree cond = fold (COND_EXPR_COND (stmt));
- if (integer_zerop (cond))
+ tree cond;
+ bool zerop, onep;
+
+ fold_defer_overflow_warnings ();
+ cond = fold (COND_EXPR_COND (stmt));
+ zerop = integer_zerop (cond);
+ onep = integer_onep (cond);
+ fold_undefer_overflow_warnings (((zerop || onep)
+ && !TREE_NO_WARNING (stmt)),
+ stmt,
+ WARN_STRICT_OVERFLOW_CONDITIONAL);
+ if (zerop)
COND_EXPR_COND (stmt) = boolean_false_node;
- else if (integer_onep (cond))
+ else if (onep)
COND_EXPR_COND (stmt) = boolean_true_node;
}
}
make_edges (void)
{
basic_block bb;
+ struct omp_region *cur_region = NULL;
/* Create an edge from entry to the first block with executable
statements in it. */
if (last)
{
- switch (TREE_CODE (last))
+ enum tree_code code = TREE_CODE (last);
+ switch (code)
{
case GOTO_EXPR:
make_goto_expr_edges (bb);
/* If this function receives a nonlocal goto, then we need to
make edges from this call site to all the nonlocal goto
handlers. */
- if (TREE_SIDE_EFFECTS (last)
- && current_function_has_nonlocal_label)
- make_goto_expr_edges (bb);
+ if (tree_can_make_abnormal_goto (last))
+ make_abnormal_goto_edges (bb, true);
/* If this statement has reachable exception handlers, then
create abnormal edges to them. */
break;
case MODIFY_EXPR:
+ gcc_unreachable ();
+
+ case GIMPLE_MODIFY_STMT:
if (is_ctrl_altering_stmt (last))
{
- /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
- CALL_EXPR may have an abnormal edge. Search the RHS for
- this case and create any required edges. */
- tree op = get_call_expr_in (last);
- if (op && TREE_SIDE_EFFECTS (op)
- && current_function_has_nonlocal_label)
- make_goto_expr_edges (bb);
+ /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
+ the CALL_EXPR may have an abnormal edge. Search the RHS
+ for this case and create any required edges. */
+ if (tree_can_make_abnormal_goto (last))
+ make_abnormal_goto_edges (bb, true);
make_eh_edges (last);
}
case OMP_ORDERED:
case OMP_CRITICAL:
case OMP_SECTION:
+ cur_region = new_omp_region (bb, code, cur_region);
fallthru = true;
break;
- case OMP_RETURN_EXPR:
- /* In the case of an OMP_SECTION, we may have already made
- an edge in make_omp_sections_edges. */
- fallthru = EDGE_COUNT (bb->succs) == 0;
- break;
-
case OMP_SECTIONS:
- make_omp_sections_edges (bb);
+ cur_region = new_omp_region (bb, code, cur_region);
fallthru = false;
break;
+ case OMP_RETURN:
+ /* In the case of an OMP_SECTION, the edge will go somewhere
+ other than the next block. This will be created later. */
+ cur_region->exit = bb;
+ fallthru = cur_region->type != OMP_SECTION;
+ cur_region = cur_region->outer;
+ break;
+
+ case OMP_CONTINUE:
+ cur_region->cont = bb;
+ switch (cur_region->type)
+ {
+ case OMP_FOR:
+ /* ??? Technically there should be a some sort of loopback
+ edge here, but it goes to a block that doesn't exist yet,
+ and without it, updating the ssa form would be a real
+ bear. Fortunately, we don't yet do ssa before expanding
+ these nodes. */
+ break;
+
+ case OMP_SECTIONS:
+ /* Wire up the edges into and out of the nested sections. */
+ /* ??? Similarly wrt loopback. */
+ {
+ struct omp_region *i;
+ for (i = cur_region->inner; i ; i = i->next)
+ {
+ gcc_assert (i->type == OMP_SECTION);
+ make_edge (cur_region->entry, i->entry, 0);
+ make_edge (i->exit, bb, EDGE_FALLTHRU);
+ }
+ }
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ fallthru = true;
+ break;
+
default:
gcc_assert (!stmt_ends_bb_p (last));
fallthru = true;
make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
}
+ if (root_omp_region)
+ free_omp_regions ();
+
/* Fold COND_EXPR_COND of each COND_EXPR. */
fold_cond_expr_cond ();
-
- /* Clean up the graph and warn for unreachable code. */
- cleanup_tree_cfg ();
}
-/* Link an OMP_SECTIONS block to all the OMP_SECTION blocks in its body. */
-
-static void
-make_omp_sections_edges (basic_block bb)
-{
- basic_block exit_bb;
- size_t i, n;
- tree vec, stmt;
-
- stmt = last_stmt (bb);
- vec = OMP_SECTIONS_SECTIONS (stmt);
- n = TREE_VEC_LENGTH (vec);
- exit_bb = bb_for_stmt (TREE_VEC_ELT (vec, n - 1));
-
- for (i = 0; i < n - 1; i += 2)
- {
- basic_block start_bb = bb_for_stmt (TREE_VEC_ELT (vec, i));
- basic_block end_bb = bb_for_stmt (TREE_VEC_ELT (vec, i + 1));
- make_edge (bb, start_bb, 0);
- make_edge (end_bb, exit_bb, EDGE_FALLTHRU);
- }
-
- /* Once the CFG has been built, the vector of sections is no longer
- useful. The region can be easily obtained with build_omp_regions.
- Furthermore, this sharing of tree expressions is not allowed by the
- statement verifier. */
- OMP_SECTIONS_SECTIONS (stmt) = NULL_TREE;
-}
-
/* Create the edges for a COND_EXPR starting at block BB.
At this point, both clauses must contain only simple gotos. */
e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
#endif
}
-}
-
-/* Hashing routine for EDGE_TO_CASES. */
-
-static hashval_t
-edge_to_cases_hash (const void *p)
-{
- edge e = ((struct edge_to_cases_elt *)p)->e;
- /* Hash on the edge itself (which is a pointer). */
- return htab_hash_pointer (e);
+ /* We do not need the gotos anymore. */
+ COND_EXPR_THEN (entry) = NULL_TREE;
+ COND_EXPR_ELSE (entry) = NULL_TREE;
}
-/* Equality routine for EDGE_TO_CASES, edges are unique, so testing
- for equality is just a pointer comparison. */
-
-static int
-edge_to_cases_eq (const void *p1, const void *p2)
-{
- edge e1 = ((struct edge_to_cases_elt *)p1)->e;
- edge e2 = ((struct edge_to_cases_elt *)p2)->e;
-
- return e1 == e2;
-}
/* Called for each element in the hash table (P) as we delete the
edge to cases hash table.
- Clear all the TREE_CHAINs to prevent problems with copying of
+ Clear all the TREE_CHAINs to prevent problems with copying of
SWITCH_EXPRs and structure sharing rules, then free the hash table
element. */
-static void
-edge_to_cases_cleanup (void *p)
+static bool
+edge_to_cases_cleanup (void *key ATTRIBUTE_UNUSED, void **value,
+ void *data ATTRIBUTE_UNUSED)
{
- struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
tree t, next;
- for (t = elt->case_labels; t; t = next)
+ for (t = (tree) *value; t; t = next)
{
next = TREE_CHAIN (t);
TREE_CHAIN (t) = NULL;
}
- free (p);
+
+ *value = NULL;
+ return false;
}
/* Start recording information mapping edges to case labels. */
start_recording_case_labels (void)
{
gcc_assert (edge_to_cases == NULL);
-
- edge_to_cases = htab_create (37,
- edge_to_cases_hash,
- edge_to_cases_eq,
- edge_to_cases_cleanup);
+ edge_to_cases = pointer_map_create ();
}
/* Return nonzero if we are recording information for case labels. */
void
end_recording_case_labels (void)
{
- htab_delete (edge_to_cases);
+ pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
+ pointer_map_destroy (edge_to_cases);
edge_to_cases = NULL;
}
-/* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
-
-static void
-record_switch_edge (edge e, tree case_label)
-{
- struct edge_to_cases_elt *elt;
- void **slot;
-
- /* Build a hash table element so we can see if E is already
- in the table. */
- elt = XNEW (struct edge_to_cases_elt);
- elt->e = e;
- elt->case_labels = case_label;
-
- slot = htab_find_slot (edge_to_cases, elt, INSERT);
-
- if (*slot == NULL)
- {
- /* E was not in the hash table. Install E into the hash table. */
- *slot = (void *)elt;
- }
- else
- {
- /* E was already in the hash table. Free ELT as we do not need it
- anymore. */
- free (elt);
-
- /* Get the entry stored in the hash table. */
- elt = (struct edge_to_cases_elt *) *slot;
-
- /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
- TREE_CHAIN (case_label) = elt->case_labels;
- elt->case_labels = case_label;
- }
-}
-
/* If we are inside a {start,end}_recording_cases block, then return
a chain of CASE_LABEL_EXPRs from T which reference E.
static tree
get_cases_for_edge (edge e, tree t)
{
- struct edge_to_cases_elt elt, *elt_p;
void **slot;
size_t i, n;
tree vec;
chains available. Return NULL so the caller can detect this case. */
if (!recording_case_labels_p ())
return NULL;
-
-restart:
- elt.e = e;
- elt.case_labels = NULL;
- slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
+ slot = pointer_map_contains (edge_to_cases, e);
if (slot)
- {
- elt_p = (struct edge_to_cases_elt *)*slot;
- return elt_p->case_labels;
- }
+ return (tree) *slot;
/* If we did not find E in the hash table, then this must be the first
time we have been queried for information about E & T. Add all the
n = TREE_VEC_LENGTH (vec);
for (i = 0; i < n; i++)
{
- tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
+ tree elt = TREE_VEC_ELT (vec, i);
+ tree lab = CASE_LABEL (elt);
basic_block label_bb = label_to_block (lab);
- record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
+ edge this_edge = find_edge (e->src, label_bb);
+
+ /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
+ a new chain. */
+ slot = pointer_map_insert (edge_to_cases, this_edge);
+ TREE_CHAIN (elt) = (tree) *slot;
+ *slot = elt;
}
- goto restart;
+
+ return (tree) *pointer_map_contains (edge_to_cases, e);
}
/* Create the edges for a SWITCH_EXPR starting at block BB.
and undefined variable warnings quite right. */
if ((errorcount || sorrycount) && uid < 0)
{
- block_stmt_iterator bsi =
+ block_stmt_iterator bsi =
bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
tree stmt;
return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
+/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
+ is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
+
+void
+make_abnormal_goto_edges (basic_block bb, bool for_call)
+{
+ basic_block target_bb;
+ block_stmt_iterator bsi;
+
+ FOR_EACH_BB (target_bb)
+ for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree target = bsi_stmt (bsi);
+
+ if (TREE_CODE (target) != LABEL_EXPR)
+ break;
+
+ target = LABEL_EXPR_LABEL (target);
+
+ /* Make an edge to every label block that has been marked as a
+ potential target for a computed goto or a non-local goto. */
+ if ((FORCED_LABEL (target) && !for_call)
+ || (DECL_NONLOCAL (target) && for_call))
+ {
+ make_edge (bb, target_bb, EDGE_ABNORMAL);
+ break;
+ }
+ }
+}
+
/* Create edges for a goto statement at block BB. */
static void
make_goto_expr_edges (basic_block bb)
{
- tree goto_t;
- basic_block target_bb;
- bool for_call;
block_stmt_iterator last = bsi_last (bb);
+ tree goto_t = bsi_stmt (last);
- goto_t = bsi_stmt (last);
-
- /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
- CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
- from a nonlocal goto. */
- if (TREE_CODE (goto_t) != GOTO_EXPR)
- for_call = true;
- else
+ /* A simple GOTO creates normal edges. */
+ if (simple_goto_p (goto_t))
{
tree dest = GOTO_DESTINATION (goto_t);
- for_call = false;
-
- /* A GOTO to a local label creates normal edges. */
- if (simple_goto_p (goto_t))
- {
- edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
+ edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
- e->goto_locus = EXPR_LOCATION (goto_t);
+ e->goto_locus = EXPR_LOCATION (goto_t);
#else
- e->goto_locus = EXPR_LOCUS (goto_t);
+ e->goto_locus = EXPR_LOCUS (goto_t);
#endif
- bsi_remove (&last, true);
- return;
- }
-
- /* Nothing more to do for nonlocal gotos. */
- if (TREE_CODE (dest) == LABEL_DECL)
- return;
-
- /* Computed gotos remain. */
+ bsi_remove (&last, true);
+ return;
}
- /* Look for the block starting with the destination label. In the
- case of a computed goto, make an edge to any label block we find
- in the CFG. */
- FOR_EACH_BB (target_bb)
- {
- block_stmt_iterator bsi;
-
- for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree target = bsi_stmt (bsi);
-
- if (TREE_CODE (target) != LABEL_EXPR)
- break;
-
- if (
- /* Computed GOTOs. Make an edge to every label block that has
- been marked as a potential target for a computed goto. */
- (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && !for_call)
- /* Nonlocal GOTO target. Make an edge to every label block
- that has been marked as a potential target for a nonlocal
- goto. */
- || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call))
- {
- make_edge (bb, target_bb, EDGE_ABNORMAL);
- break;
- }
- }
- }
+ /* A computed GOTO creates abnormal edges. */
+ make_abnormal_goto_edges (bb, false);
}
to do early because it allows us to group case labels before creating
the edges for the CFG, and it speeds up block statement iterators in
all passes later on.
- We only run this pass once, running it more than once is probably not
- profitable. */
+ We rerun this pass after CFG is created, to get rid of the labels that
+ are no longer referenced. After then we do not run it any more, since
+ (almost) no new labels should be created. */
/* A map from basic block index to the leading label of that block. */
-static tree *label_for_bb;
+static struct label_record
+{
+ /* The label. */
+ tree label;
+
+ /* True if the label is referenced from somewhere. */
+ bool used;
+} *label_for_bb;
/* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
static void
if (! bb)
return;
- new_label = label_for_bb[bb->index];
+ new_label = label_for_bb[bb->index].label;
+ label_for_bb[bb->index].used = true;
set_eh_region_tree_label (region, new_label);
}
}
main_block_label (tree label)
{
basic_block bb = label_to_block (label);
+ tree main_label = label_for_bb[bb->index].label;
/* label_to_block possibly inserted undefined label into the chain. */
- if (!label_for_bb[bb->index])
- label_for_bb[bb->index] = label;
- return label_for_bb[bb->index];
+ if (!main_label)
+ {
+ label_for_bb[bb->index].label = label;
+ main_label = label;
+ }
+
+ label_for_bb[bb->index].used = true;
+ return main_label;
}
/* Cleanup redundant labels. This is a three-step process:
cleanup_dead_labels (void)
{
basic_block bb;
- label_for_bb = XCNEWVEC (tree, last_basic_block);
+ label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
/* Find a suitable label for each block. We use the first user-defined
label if there is one, or otherwise just the first label we see. */
/* If we have not yet seen a label for the current block,
remember this one and see if there are more labels. */
- if (! label_for_bb[bb->index])
+ if (!label_for_bb[bb->index].label)
{
- label_for_bb[bb->index] = label;
+ label_for_bb[bb->index].label = label;
continue;
}
/* If we did see a label for the current block already, but it
is an artificially created label, replace it if the current
label is a user defined label. */
- if (! DECL_ARTIFICIAL (label)
- && DECL_ARTIFICIAL (label_for_bb[bb->index]))
+ if (!DECL_ARTIFICIAL (label)
+ && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
{
- label_for_bb[bb->index] = label;
+ label_for_bb[bb->index].label = label;
break;
}
}
true_branch = COND_EXPR_THEN (stmt);
false_branch = COND_EXPR_ELSE (stmt);
- GOTO_DESTINATION (true_branch)
- = main_block_label (GOTO_DESTINATION (true_branch));
- GOTO_DESTINATION (false_branch)
- = main_block_label (GOTO_DESTINATION (false_branch));
+ if (true_branch)
+ GOTO_DESTINATION (true_branch)
+ = main_block_label (GOTO_DESTINATION (true_branch));
+ if (false_branch)
+ GOTO_DESTINATION (false_branch)
+ = main_block_label (GOTO_DESTINATION (false_branch));
break;
}
-
+
case SWITCH_EXPR:
{
size_t i;
tree vec = SWITCH_LABELS (stmt);
size_t n = TREE_VEC_LENGTH (vec);
-
+
/* Replace all destination labels. */
for (i = 0; i < n; ++i)
{
FOR_EACH_BB (bb)
{
block_stmt_iterator i;
- tree label_for_this_bb = label_for_bb[bb->index];
+ tree label_for_this_bb = label_for_bb[bb->index].label;
- if (! label_for_this_bb)
+ if (!label_for_this_bb)
continue;
+ /* If the main label of the block is unused, we may still remove it. */
+ if (!label_for_bb[bb->index].used)
+ label_for_this_bb = NULL;
+
for (i = bsi_start (bb); !bsi_end_p (i); )
{
tree label, stmt = bsi_stmt (i);
int old_size = TREE_VEC_LENGTH (labels);
int i, j, new_size = old_size;
tree default_case = TREE_VEC_ELT (labels, old_size - 1);
- tree default_label;
+ tree default_label;
/* The default label is always the last case in a switch
statement after gimplification. */
if (b == EXIT_BLOCK_PTR)
return false;
-
+
/* If A ends by a statement causing exceptions or something similar, we
cannot merge the blocks. */
stmt = last_stmt (a);
return false;
/* It must be possible to eliminate all phi nodes in B. If ssa form
- is not up-to-date, we cannot eliminate any phis. */
+ is not up-to-date, we cannot eliminate any phis; however, if only
+ some symbols as whole are marked for renaming, this is not a problem,
+ as phi nodes for those symbols are irrelevant in updating anyway. */
phi = phi_nodes (b);
if (phi)
{
- if (need_ssa_update_p ())
+ if (name_mappings_registered_p ())
return false;
for (; phi; phi = PHI_CHAIN (phi))
use_operand_p use;
tree stmt;
edge e;
- unsigned i;
- VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
- FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
+ FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
{
- stmt = USE_STMT (use);
- replace_exp (use, val);
+ if (TREE_CODE (stmt) != PHI_NODE)
+ push_stmt_changes (&stmt);
- if (TREE_CODE (stmt) == PHI_NODE)
- {
- e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
- if (e->flags & EDGE_ABNORMAL)
+ FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
+ {
+ replace_exp (use, val);
+
+ if (TREE_CODE (stmt) == PHI_NODE)
{
- /* This can only occur for virtual operands, since
- for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
- would prevent replacement. */
- gcc_assert (!is_gimple_reg (name));
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
+ e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ /* This can only occur for virtual operands, since
+ for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
+ would prevent replacement. */
+ gcc_assert (!is_gimple_reg (name));
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
+ }
}
}
- else
- VEC_safe_push (tree, heap, stmts, stmt);
- }
-
- /* We do not update the statements in the loop above. Consider
- x = w * w;
-
- If we performed the update in the first loop, the statement
- would be rescanned after first occurrence of w is replaced,
- the new uses would be placed to the beginning of the list,
- and we would never process them. */
- for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
- {
- tree rhs;
- fold_stmt_inplace (stmt);
+ if (TREE_CODE (stmt) != PHI_NODE)
+ {
+ tree rhs;
+
+ fold_stmt_inplace (stmt);
+ if (cfgcleanup_altered_bbs)
+ bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
+
+ /* FIXME. This should go in pop_stmt_changes. */
+ rhs = get_rhs (stmt);
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ recompute_tree_invariant_for_addr_expr (rhs);
- rhs = get_rhs (stmt);
- if (TREE_CODE (rhs) == ADDR_EXPR)
- recompute_tree_invariant_for_addr_expr (rhs);
+ maybe_clean_or_replace_eh_stmt (stmt, stmt);
- maybe_clean_or_replace_eh_stmt (stmt, stmt);
- mark_new_vars_to_rename (stmt);
+ pop_stmt_changes (&stmt);
+ }
}
- VEC_free (tree, heap, stmts);
+ gcc_assert (has_zero_uses (name));
/* Also update the trees stored in loop structures. */
if (current_loops)
{
struct loop *loop;
+ loop_iterator li;
- for (i = 0; i < current_loops->num; i++)
+ FOR_EACH_LOOP (li, loop, 0)
{
- loop = current_loops->parray[i];
- if (loop)
- substitute_in_loop_info (loop, name, val);
+ substitute_in_loop_info (loop, name, val);
}
}
}
with ordering of phi nodes. This is because A is the single
predecessor of B, therefore results of the phi nodes cannot
appear as arguments of the phi nodes. */
- copy = build2 (MODIFY_EXPR, void_type_node, def, use);
+ copy = build_gimple_modify_stmt (def, use);
bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
- SET_PHI_RESULT (phi, NULL_TREE);
SSA_NAME_DEF_STMT (def) = copy;
+ remove_phi_node (phi, NULL, false);
}
else
- replace_uses_by (def, use);
-
- remove_phi_node (phi, NULL);
+ {
+ replace_uses_by (def, use);
+ remove_phi_node (phi, NULL, true);
+ }
}
/* Ensure that B follows A. */
}
else
{
- set_bb_for_stmt (bsi_stmt (bsi), a);
+ change_bb_for_stmt (bsi_stmt (bsi), a);
bsi_next (&bsi);
}
}
/* Merge the chains. */
- last = tsi_last (a->stmt_list);
- tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
- b->stmt_list = NULL;
+ last = tsi_last (bb_stmt_list (a));
+ tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
+ set_bb_stmt_list (b, NULL_TREE);
+
+ if (cfgcleanup_altered_bbs)
+ bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
reached by a complex edge, if there is one. Else, return BB. We use
this in optimizations that use post-dominators for their heuristics,
to catch the cases in C++ where function calls are involved. */
-
+
basic_block
-single_noncomplex_succ (basic_block bb)
+single_noncomplex_succ (basic_block bb)
{
edge e0, e1;
if (EDGE_COUNT (bb->succs) != 2)
return bb;
-
+
e0 = EDGE_SUCC (bb, 0);
e1 = EDGE_SUCC (bb, 1);
if (e0->flags & EDGE_COMPLEX)
return e1->dest;
if (e1->flags & EDGE_COMPLEX)
return e0->dest;
-
+
return bb;
-}
-
+}
/* Walk the function tree removing unnecessary statements.
else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
{
if (else_stmt
- && TREE_CODE (else_stmt) == MODIFY_EXPR
- && TREE_OPERAND (else_stmt, 0) == cond
- && integer_zerop (TREE_OPERAND (else_stmt, 1)))
+ && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
+ && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
+ && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
}
else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
: &COND_EXPR_ELSE (*stmt_p));
if (stmt
- && TREE_CODE (stmt) == MODIFY_EXPR
- && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
- && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
+ && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+ && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
+ && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
*location = alloc_stmt_list ();
}
}
/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
decl. This allows us to eliminate redundant or useless
- calls to "const" functions.
+ calls to "const" functions.
Gimplifier already does the same operation, but we may notice functions
being const and pure once their calls has been gimplified, so we need
break;
case MODIFY_EXPR:
+ gcc_unreachable ();
+
+ case GIMPLE_MODIFY_STMT:
data->last_goto = NULL;
fold_stmt (tp);
op = get_call_expr_in (t);
tsi_delink (&i);
continue;
}
-
+
remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
t = tsi_stmt (i);
}
-struct tree_opt_pass pass_remove_useless_stmts =
+struct tree_opt_pass pass_remove_useless_stmts =
{
"useless", /* name */
NULL, /* gate */
while (phi)
{
tree next = PHI_CHAIN (phi);
- remove_phi_node (phi, NULL_TREE);
+ remove_phi_node (phi, NULL_TREE, true);
phi = next;
}
}
}
- /* If we remove the header or the latch of a loop, mark the loop for
- removal by setting its header and latch to NULL. */
if (current_loops)
{
struct loop *loop = bb->loop_father;
+ /* If a loop gets removed, clean up the information associated
+ with it. */
if (loop->latch == bb
|| loop->header == bb)
- {
- loop->latch = NULL;
- loop->header = NULL;
-
- /* Also clean up the information associated with the loop. Updating
- it would waste time. More importantly, it may refer to ssa
- names that were defined in other removed basic block -- these
- ssa names are now removed and invalid. */
- free_numbers_of_iterations_estimates_loop (loop);
- }
+ free_numbers_of_iterations_estimates_loop (loop);
}
/* Remove all the instructions in the block. */
- for (i = bsi_start (bb); !bsi_end_p (i);)
+ if (bb_stmt_list (bb) != NULL_TREE)
{
- tree stmt = bsi_stmt (i);
- if (TREE_CODE (stmt) == LABEL_EXPR
- && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
- || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
+ for (i = bsi_start (bb); !bsi_end_p (i);)
{
- basic_block new_bb;
- block_stmt_iterator new_bsi;
+ tree stmt = bsi_stmt (i);
+ if (TREE_CODE (stmt) == LABEL_EXPR
+ && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
+ || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
+ {
+ basic_block new_bb;
+ block_stmt_iterator new_bsi;
+
+ /* A non-reachable non-local label may still be referenced.
+ But it no longer needs to carry the extra semantics of
+ non-locality. */
+ if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
+ {
+ DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
+ FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
+ }
- /* A non-reachable non-local label may still be referenced.
- But it no longer needs to carry the extra semantics of
- non-locality. */
- if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
+ new_bb = bb->prev_bb;
+ new_bsi = bsi_start (new_bb);
+ bsi_remove (&i, false);
+ bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
+ }
+ else
{
- DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
- FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
+ /* Release SSA definitions if we are in SSA. Note that we
+ may be called when not in SSA. For example,
+ final_cleanup calls this function via
+ cleanup_tree_cfg. */
+ if (gimple_in_ssa_p (cfun))
+ release_defs (stmt);
+
+ bsi_remove (&i, true);
}
-
- new_bb = bb->prev_bb;
- new_bsi = bsi_start (new_bb);
- bsi_remove (&i, false);
- bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
- }
- else
- {
- /* Release SSA definitions if we are in SSA. Note that we
- may be called when not in SSA. For example,
- final_cleanup calls this function via
- cleanup_tree_cfg. */
- if (in_ssa_p)
- release_defs (stmt);
-
- bsi_remove (&i, true);
- }
- /* Don't warn for removed gotos. Gotos are often removed due to
- jump threading, thus resulting in bogus warnings. Not great,
- since this way we lose warnings for gotos in the original
- program that are indeed unreachable. */
- if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
- {
+ /* Don't warn for removed gotos. Gotos are often removed due to
+ jump threading, thus resulting in bogus warnings. Not great,
+ since this way we lose warnings for gotos in the original
+ program that are indeed unreachable. */
+ if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
+ {
#ifdef USE_MAPPED_LOCATION
- if (EXPR_HAS_LOCATION (stmt))
- loc = EXPR_LOCATION (stmt);
+ if (EXPR_HAS_LOCATION (stmt))
+ loc = EXPR_LOCATION (stmt);
#else
- source_locus t;
- t = EXPR_LOCUS (stmt);
- if (t && LOCATION_LINE (*t) > 0)
- loc = t;
+ source_locus t;
+ t = EXPR_LOCUS (stmt);
+ if (t && LOCATION_LINE (*t) > 0)
+ loc = t;
#endif
+ }
}
}
#endif
remove_phi_nodes_and_edges_for_unreachable_block (bb);
+ bb->il.tree = NULL;
}
return find_taken_edge_switch_expr (bb, val);
if (computed_goto_p (stmt))
- return find_taken_edge_computed_goto (bb, TREE_OPERAND( val, 0));
+ {
+ /* Only optimize if the argument is a label; if the argument is
+ not a label then we cannot construct a proper CFG.
+
+ It may be the case that we only need to allow the LABEL_REF to
+ appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
+ appear inside a LABEL_EXPR just to be safe. */
+ if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
+ && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
+ return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
+ return NULL;
+ }
gcc_unreachable ();
}
edge true_edge, false_edge;
extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
-
+
gcc_assert (TREE_CODE (val) == INTEGER_CST);
- return (zero_p (val) ? false_edge : true_edge);
+ return (integer_zerop (val) ? false_edge : true_edge);
}
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
- dump_generic_bb (outf, bb, indent, TDF_VOPS);
+ dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
}
{
debug_tree_bb (BASIC_BLOCK (n));
return BASIC_BLOCK (n);
-}
+}
/* Dump the CFG on stderr.
FLAGS are the same used by the tree dumping functions
- (see TDF_* in tree.h). */
+ (see TDF_* in tree-pass.h). */
void
debug_tree_cfg (int flags)
}
/* OpenMP directives alter control flow. */
- if (flag_openmp && OMP_DIRECTIVE_P (t))
+ if (OMP_DIRECTIVE_P (t))
return true;
/* If a statement can throw, it alters control flow. */
}
-/* Checks whether EXPR is a simple local goto. */
+/* Return true if T is a simple local goto. */
bool
-simple_goto_p (tree expr)
+simple_goto_p (tree t)
{
- return (TREE_CODE (expr) == GOTO_EXPR
- && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
+ return (TREE_CODE (t) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
+}
+
+
+/* Return true if T can make an abnormal transfer of control flow.
+ Transfers of control flow associated with EH are excluded. */
+
+bool
+tree_can_make_abnormal_goto (tree t)
+{
+ if (computed_goto_p (t))
+ return true;
+ if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+ t = GIMPLE_STMT_OPERAND (t, 1);
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) == CALL_EXPR)
+ return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
+ return false;
}
return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
-
-/* Add gotos that used to be represented implicitly in the CFG. */
+/* Remove block annotations and other datastructures. */
void
-disband_implicit_edges (void)
+delete_tree_cfg_annotations (void)
{
basic_block bb;
- block_stmt_iterator last;
- edge e;
- edge_iterator ei;
- tree stmt, label;
+ block_stmt_iterator bsi;
+ /* Remove annotations from every tree in the function. */
FOR_EACH_BB (bb)
- {
- last = bsi_last (bb);
- stmt = last_stmt (bb);
-
- if (stmt && TREE_CODE (stmt) == COND_EXPR)
- {
- /* Remove superfluous gotos from COND_EXPR branches. Moved
- from cfg_remove_useless_stmts here since it violates the
- invariants for tree--cfg correspondence and thus fits better
- here where we do it anyway. */
- e = find_edge (bb, bb->next_bb);
- if (e)
- {
- if (e->flags & EDGE_TRUE_VALUE)
- COND_EXPR_THEN (stmt) = build_empty_stmt ();
- else if (e->flags & EDGE_FALSE_VALUE)
- COND_EXPR_ELSE (stmt) = build_empty_stmt ();
- else
- gcc_unreachable ();
- e->flags |= EDGE_FALLTHRU;
- }
-
- continue;
- }
-
- if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
- {
- /* Remove the RETURN_EXPR if we may fall though to the exit
- instead. */
- gcc_assert (single_succ_p (bb));
- gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
-
- if (bb->next_bb == EXIT_BLOCK_PTR
- && !TREE_OPERAND (stmt, 0))
- {
- bsi_remove (&last, true);
- single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
- }
- continue;
- }
-
- /* There can be no fallthru edge if the last statement is a control
- one. */
- if (stmt && is_ctrl_stmt (stmt))
- continue;
-
- /* Find a fallthru edge and emit the goto if necessary. */
- FOR_EACH_EDGE (e, ei, bb->succs)
- if (e->flags & EDGE_FALLTHRU)
- break;
-
- if (!e || e->dest == bb->next_bb)
- continue;
-
- gcc_assert (e->dest != EXIT_BLOCK_PTR);
- label = tree_block_label (e->dest);
-
- stmt = build1 (GOTO_EXPR, void_type_node, label);
-#ifdef USE_MAPPED_LOCATION
- SET_EXPR_LOCATION (stmt, e->goto_locus);
-#else
- SET_EXPR_LOCUS (stmt, e->goto_locus);
-#endif
- bsi_insert_after (&last, stmt, BSI_NEW_STMT);
- e->flags &= ~EDGE_FALLTHRU;
- }
-}
-
-/* Remove block annotations and other datastructures. */
-
-void
-delete_tree_cfg_annotations (void)
-{
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ ggc_free (stmt->base.ann);
+ stmt->base.ann = NULL;
+ }
label_to_block_map = NULL;
}
}
-/* Return a pointer to the last statement in block BB. */
-
-tree *
-last_stmt_ptr (basic_block bb)
-{
- block_stmt_iterator last = bsi_last (bb);
- return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
-}
-
-
/* Return the last statement of an otherwise empty block. Return NULL
if the block is totally empty, or if it contains more than one
statement. */
LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
if (old_len <= (unsigned) uid)
{
- basic_block *addr;
unsigned new_len = 3 * uid / 2;
- VEC_safe_grow (basic_block, gc, label_to_block_map,
- new_len);
- addr = VEC_address (basic_block, label_to_block_map);
- memset (&addr[old_len],
- 0, sizeof (basic_block) * (new_len - old_len));
+ VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
+ new_len);
}
}
else
}
}
+/* Faster version of set_bb_for_stmt that assumes that a statement is being moved
+ from one basic block to another.
+ For BB splitting we can run into a quadratic case, so performance is quite
+ important and knowing that the tables are big enough, change_bb_for_stmt
+ can inline as leaf function. */
+static inline void
+change_bb_for_stmt (tree t, basic_block bb)
+{
+ get_stmt_ann (t)->bb = bb;
+ if (TREE_CODE (t) == LABEL_EXPR)
+ VEC_replace (basic_block, label_to_block_map,
+ LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
+}
+
/* Finds iterator for STMT. */
extern block_stmt_iterator
static inline void
update_modified_stmts (tree t)
{
+ if (!ssa_operands_active ())
+ return;
if (TREE_CODE (t) == STATEMENT_LIST)
{
tree_stmt_iterator i;
/* Remove the statement pointed to by iterator I. The iterator is updated
- to the next statement.
+ to the next statement.
When REMOVE_EH_INFO is true we remove the statement pointed to by
iterator I from the EH tables. Otherwise we do not modify the EH
tsi_delink (&i->tsi);
mark_stmt_modified (t);
if (remove_eh_info)
- remove_stmt_from_eh_region (t);
+ {
+ remove_stmt_from_eh_region (t);
+ gimple_remove_stmt_histograms (cfun, t);
+ }
}
/* Move the statement at FROM so it comes right after the statement at TO. */
-void
+void
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
{
tree stmt = bsi_stmt (*from);
bsi_remove (from, false);
- bsi_insert_after (to, stmt, BSI_SAME_STMT);
-}
+ /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
+ move statements to an empty block. */
+ bsi_insert_after (to, stmt, BSI_NEW_STMT);
+}
/* Move the statement at FROM so it comes right before the statement at TO. */
-void
+void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
tree stmt = bsi_stmt (*from);
bsi_remove (from, false);
+ /* For consistency with bsi_move_after, it might be better to have
+ BSI_NEW_STMT here; however, that breaks several places that expect
+ that TO does not change. */
bsi_insert_before (to, stmt, BSI_SAME_STMT);
}
bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
{
block_stmt_iterator last = bsi_last (bb);
-
+
/* Have to check bsi_end_p because it could be an empty block. */
if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
bsi_move_before (from, &last);
/* Replace the contents of the statement pointed to by iterator BSI
with STMT. If UPDATE_EH_INFO is true, the exception handling
information of the original statement is moved to the new statement. */
-
void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
int eh_region;
tree orig_stmt = bsi_stmt (*bsi);
+ if (stmt == orig_stmt)
+ return;
SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
set_bb_for_stmt (stmt, bsi->bb);
}
}
+ gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
+ gimple_remove_stmt_histograms (cfun, orig_stmt);
delink_stmt_imm_use (orig_stmt);
*bsi_stmt_ptr (*bsi) = stmt;
mark_stmt_modified (stmt);
restart:
/* If the destination has one predecessor which has no PHI nodes,
- insert there. Except for the exit block.
+ insert there. Except for the exit block.
The requirement for no PHI nodes could be relaxed. Basically we
would have to examine the PHIs to prove that none of them used
tree op = TREE_OPERAND (tmp, 0);
if (op && !is_gimple_val (op))
{
- gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
+ gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
bsi_insert_before (bsi, op, BSI_NEW_STMT);
- TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
+ TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
}
bsi_prev (bsi);
return true;
if (!PENDING_STMT (old_edge))
return;
-
+
for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
var && phi;
var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
PENDING_STMT (old_edge) = NULL;
}
-/* Returns the basic block after that the new basic block created
+/* Returns the basic block after which the new basic block created
by splitting edge EDGE_IN should be placed. Tries to keep the new block
near its "logical" location. This is of most help to humans looking
at debugging dumps. */
static basic_block
tree_split_edge (edge edge_in)
{
- basic_block new_bb, after_bb, dest, src;
+ basic_block new_bb, after_bb, dest;
edge new_edge, e;
/* Abnormal edges cannot be split. */
gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
- src = edge_in->src;
dest = edge_in->dest;
after_bb = split_edge_bb_loc (edge_in);
new_edge->count = edge_in->count;
e = redirect_edge_and_branch (edge_in, new_bb);
- gcc_assert (e);
+ gcc_assert (e == edge_in);
reinstall_phi_args (new_edge, e);
return new_bb;
}
-
-/* Return true when BB has label LABEL in it. */
-
-static bool
-has_label_p (basic_block bb, tree label)
-{
- block_stmt_iterator bsi;
-
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree stmt = bsi_stmt (bsi);
-
- if (TREE_CODE (stmt) != LABEL_EXPR)
- return false;
- if (LABEL_EXPR_LABEL (stmt) == label)
- return true;
- }
- return false;
-}
-
-
/* Callback for walk_tree, check that all elements with address taken are
properly noticed as such. The DATA is an int* that is 1 if TP was seen
inside a PHI node. */
if (TYPE_P (t))
*walk_subtrees = 0;
-
+
/* Check operand N for being valid GIMPLE and give error MSG if not. */
#define CHECK_OP(N, MSG) \
do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
break;
case MODIFY_EXPR:
- x = TREE_OPERAND (t, 0);
+ gcc_unreachable ();
+
+ case GIMPLE_MODIFY_STMT:
+ x = GIMPLE_STMT_OPERAND (t, 0);
if (TREE_CODE (x) == BIT_FIELD_REF
&& is_gimple_reg (TREE_OPERAND (x, 0)))
{
case NOP_EXPR:
case CONVERT_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
case FLOAT_EXPR:
case NEGATE_EXPR:
case ABS_EXPR:
CHECK_OP (1, "invalid operand to binary operator");
break;
+ case CONSTRUCTOR:
+ if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ *walk_subtrees = 0;
+ break;
+
default:
break;
}
static tree
verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
{
- htab_t htab = (htab_t) data;
- void **slot;
+ struct pointer_set_t *visited = (struct pointer_set_t *) data;
if (tree_node_can_be_shared (*tp))
{
return NULL;
}
- slot = htab_find_slot (htab, *tp, INSERT);
- if (*slot)
- return (tree) *slot;
- *slot = *tp;
+ if (pointer_set_insert (visited, *tp))
+ return *tp;
return NULL;
}
+/* Helper function for verify_gimple_tuples. */
+
+static tree
+verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data ATTRIBUTE_UNUSED)
+{
+ switch (TREE_CODE (*tp))
+ {
+ case MODIFY_EXPR:
+ error ("unexpected non-tuple");
+ debug_tree (*tp);
+ gcc_unreachable ();
+ return NULL_TREE;
+
+ default:
+ return NULL_TREE;
+ }
+}
+
+/* Verify that there are no trees that should have been converted to
+ gimple tuples. Return true if T contains a node that should have
+ been converted to a gimple tuple, but hasn't. */
+
+static bool
+verify_gimple_tuples (tree t)
+{
+ return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
+}
+
+static bool eh_error_found;
+static int
+verify_eh_throw_stmt_node (void **slot, void *data)
+{
+ struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
+ struct pointer_set_t *visited = (struct pointer_set_t *) data;
+
+ if (!pointer_set_contains (visited, node->stmt))
+ {
+ error ("Dead STMT in EH table");
+ debug_generic_stmt (node->stmt);
+ eh_error_found = true;
+ }
+ return 0;
+}
+
/* Verify the GIMPLE statement chain. */
void
basic_block bb;
block_stmt_iterator bsi;
bool err = false;
- htab_t htab;
+ struct pointer_set_t *visited, *visited_stmts;
tree addr;
timevar_push (TV_TREE_STMT_VERIFY);
- htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
+ visited = pointer_set_create ();
+ visited_stmts = pointer_set_create ();
FOR_EACH_BB (bb)
{
{
int phi_num_args = PHI_NUM_ARGS (phi);
+ pointer_set_insert (visited_stmts, phi);
if (bb_for_stmt (phi) != bb)
{
error ("bb_for_stmt (phi) is set to a wrong basic block");
err |= true;
}
- addr = walk_tree (&t, verify_node_sharing, htab, NULL);
+ addr = walk_tree (&t, verify_node_sharing, visited, NULL);
if (addr)
{
error ("incorrect sharing of tree nodes");
{
tree stmt = bsi_stmt (bsi);
+ pointer_set_insert (visited_stmts, stmt);
+ err |= verify_gimple_tuples (stmt);
+
if (bb_for_stmt (stmt) != bb)
{
error ("bb_for_stmt (stmt) is set to a wrong basic block");
bsi_next (&bsi);
err |= verify_stmt (stmt, bsi_end_p (bsi));
- addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
+ addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
if (addr)
{
error ("incorrect sharing of tree nodes");
}
}
}
+ eh_error_found = false;
+ if (get_eh_throw_stmt_table (cfun))
+ htab_traverse (get_eh_throw_stmt_table (cfun),
+ verify_eh_throw_stmt_node,
+ visited_stmts);
- if (err)
+ if (err | eh_error_found)
internal_error ("verify_stmts failed");
- htab_delete (htab);
+ pointer_set_destroy (visited);
+ pointer_set_destroy (visited_stmts);
+ verify_histograms ();
timevar_pop (TV_TREE_STMT_VERIFY);
}
edge e;
edge_iterator ei;
- if (ENTRY_BLOCK_PTR->stmt_list)
+ if (ENTRY_BLOCK_PTR->il.tree)
{
- error ("ENTRY_BLOCK has a statement list associated with it");
+ error ("ENTRY_BLOCK has IL associated with it");
err = 1;
}
- if (EXIT_BLOCK_PTR->stmt_list)
+ if (EXIT_BLOCK_PTR->il.tree)
{
- error ("EXIT_BLOCK has a statement list associated with it");
+ error ("EXIT_BLOCK has IL associated with it");
err = 1;
}
}
}
+ if (TREE_CODE (stmt) != COND_EXPR)
+ {
+ /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
+ after anything else but if statement. */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
+ {
+ error ("true/false edge after a non-COND_EXPR in bb %d",
+ bb->index);
+ err = 1;
+ }
+ }
+
switch (TREE_CODE (stmt))
{
case COND_EXPR:
{
edge true_edge;
edge false_edge;
- if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
- || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
+
+ if (COND_EXPR_THEN (stmt) != NULL_TREE
+ || COND_EXPR_ELSE (stmt) != NULL_TREE)
{
- error ("structured COND_EXPR at the end of bb %d", bb->index);
+ error ("COND_EXPR with code in branches at the end of bb %d",
+ bb->index);
err = 1;
}
bb->index);
err = 1;
}
-
- if (!has_label_p (true_edge->dest,
- GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
- {
- error ("%<then%> label does not match edge at end of bb %d",
- bb->index);
- err = 1;
- }
-
- if (!has_label_p (false_edge->dest,
- GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
- {
- error ("%<else%> label does not match edge at end of bb %d",
- bb->index);
- err = 1;
- }
}
break;
if (simple_goto_p (stmt))
{
error ("explicit goto at end of bb %d", bb->index);
- err = 1;
+ err = 1;
}
else
{
- /* FIXME. We should double check that the labels in the
+ /* FIXME. We should double check that the labels in the
destination blocks have their address taken. */
FOR_EACH_EDGE (e, ei, bb->succs)
if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
}
}
- if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
+ if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
verify_dominators (CDI_DOMINATORS);
return err;
if (single_pred_p (bb))
return;
- /* If we redirected a branch we must create new phi nodes at the
+ /* If we redirected a branch we must create new PHI nodes at the
start of BB. */
for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
{
basic_block bb = e->src;
block_stmt_iterator bsi;
edge ret;
- tree label, stmt;
+ tree stmt;
- if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
+ if (e->flags & EDGE_ABNORMAL)
return NULL;
- if (e->src != ENTRY_BLOCK_PTR
+ if (e->src != ENTRY_BLOCK_PTR
&& (ret = tree_try_redirect_by_replacing_jump (e, dest)))
return ret;
if (e->dest == dest)
return NULL;
- label = tree_block_label (dest);
-
bsi = bsi_last (bb);
stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
{
case COND_EXPR:
- stmt = (e->flags & EDGE_TRUE_VALUE
- ? COND_EXPR_THEN (stmt)
- : COND_EXPR_ELSE (stmt));
- GOTO_DESTINATION (stmt) = label;
+ /* For COND_EXPR, we only need to redirect the edge. */
break;
case GOTO_EXPR:
case SWITCH_EXPR:
{
tree cases = get_cases_for_edge (e, stmt);
+ tree label = tree_block_label (dest);
/* If we have a list of cases associated with E, then use it
as it's a lot faster than walking the entire case vector. */
return e;
}
+/* Returns true if it is possible to remove edge E by redirecting
+ it to the destination of the other edge from E->src. */
+
+static bool
+tree_can_remove_branch_p (edge e)
+{
+ if (e->flags & EDGE_ABNORMAL)
+ return false;
+
+ return true;
+}
/* Simple wrapper, as we can always redirect fallthru edges. */
static basic_block
tree_split_block (basic_block bb, void *stmt)
{
- block_stmt_iterator bsi, bsi_tgt;
- tree act;
+ block_stmt_iterator bsi;
+ tree_stmt_iterator tsi_tgt;
+ tree act, list;
basic_block new_bb;
edge e;
edge_iterator ei;
}
}
- bsi_tgt = bsi_start (new_bb);
- while (!bsi_end_p (bsi))
- {
- act = bsi_stmt (bsi);
- bsi_remove (&bsi, false);
- bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
- }
+ if (bsi_end_p (bsi))
+ return new_bb;
+
+ /* Split the statement list - avoid re-creating new containers as this
+ brings ugly quadratic memory consumption in the inliner.
+ (We are still quadratic since we need to update stmt BB pointers,
+ sadly.) */
+ list = tsi_split_statement_list_before (&bsi.tsi);
+ set_bb_stmt_list (new_bb, list);
+ for (tsi_tgt = tsi_start (list);
+ !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
+ change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
return new_bb;
}
region = lookup_stmt_eh_region (stmt);
if (region >= 0)
add_stmt_to_eh_region (copy, region);
+ gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
/* Create new names for all the definitions created by COPY and
add replacement mappings for each new name. */
edge e, e_copy;
edge_iterator ei;
tree phi, phi_copy, phi_next, def;
-
+
bb = get_bb_original (bb_copy);
FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
basic_block *region, unsigned n_region,
basic_block *region_copy)
{
- unsigned i, n_doms;
+ unsigned i;
bool free_region_copy = false, copying_header = false;
struct loop *loop = entry->dest->loop_father;
edge exit_copy;
- basic_block *doms;
+ VEC (basic_block, heap) *doms;
edge redirected;
int total_freq = 0, entry_freq = 0;
gcov_type total_count = 0, entry_count = 0;
return false;
}
- loop->copy = loop;
+ set_loop_copy (loop, loop);
/* In case the function is used for loop header copying (which is the primary
use), ensure that EXIT and its copy will be new latch and entry edges. */
if (loop->header == entry->dest)
{
copying_header = true;
- loop->copy = loop->outer;
+ set_loop_copy (loop, loop_outer (loop));
if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
return false;
/* Record blocks outside the region that are dominated by something
inside. */
- doms = XNEWVEC (basic_block, n_basic_blocks);
+ doms = NULL;
initialize_original_copy_tables ();
- n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
+ doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
if (entry->dest->count)
{
total_count - entry_count,
total_count);
scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
- total_count);
+ total_count);
}
else
{
region, but was dominated by something inside needs recounting as
well. */
set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
- doms[n_doms++] = get_bb_original (entry->dest);
- iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
+ VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
+ iterate_fix_dominators (CDI_DOMINATORS, doms, false);
free (doms);
/* Add the other PHI node arguments. */
struct move_stmt_d *p = (struct move_stmt_d *) data;
tree t = *tp;
- if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
+ if (p->block
+ && (EXPR_P (t) || GIMPLE_STMT_P (t)))
TREE_BLOCK (t) = p->block;
- if (OMP_DIRECTIVE_P (t) && TREE_CODE (t) != OMP_RETURN_EXPR)
+ if (OMP_DIRECTIVE_P (t)
+ && TREE_CODE (t) != OMP_RETURN
+ && TREE_CODE (t) != OMP_CONTINUE)
{
/* Do not remap variables inside OMP directives. Variables
referenced in clauses and directive header belong to the
if (p->new_label_map)
{
struct tree_map in, *out;
- in.from = t;
+ in.base.from = t;
out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
if (out)
*tp = t = out->to;
original array of blocks and placed in DEST_FN's array of blocks.
If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
updated to reflect the moved edges.
-
+
On exit, local variables that need to be removed from
CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
block_stmt_iterator si;
struct move_stmt_d d;
unsigned old_len, new_len;
- basic_block *addr;
+
+ /* Remove BB from dominance structures. */
+ delete_from_dominance_info (CDI_DOMINATORS, bb);
/* Link BB to the new linked list. */
move_block_after (bb, after);
/* Grow DEST_CFUN's basic block array if needed. */
cfg = dest_cfun->cfg;
cfg->x_n_basic_blocks++;
- if (bb->index > cfg->x_last_basic_block)
- cfg->x_last_basic_block = bb->index;
+ if (bb->index >= cfg->x_last_basic_block)
+ cfg->x_last_basic_block = bb->index + 1;
old_len = VEC_length (basic_block, cfg->x_basic_block_info);
if ((unsigned) cfg->x_last_basic_block >= old_len)
{
new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
- VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
- addr = VEC_address (basic_block, cfg->x_basic_block_info);
- memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
+ VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
+ new_len);
}
VEC_replace (basic_block, cfg->x_basic_block_info,
- cfg->x_last_basic_block, bb);
+ bb->index, bb);
/* The statements in BB need to be associated with a new TREE_BLOCK.
Labels need to be associated with a new label-to-block map. */
if (old_len <= (unsigned) uid)
{
new_len = 3 * uid / 2;
- VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
- new_len);
- addr = VEC_address (basic_block, cfg->x_label_to_block_map);
- memset (&addr[old_len], 0,
- sizeof (basic_block) * (new_len - old_len));
+ VEC_safe_grow_cleared (basic_block, gc,
+ cfg->x_label_to_block_map, new_len);
}
VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
{
add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
remove_stmt_from_eh_region (stmt);
+ gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
+ gimple_remove_stmt_histograms (cfun, stmt);
}
}
}
basic_block bb, int region)
{
block_stmt_iterator si;
-
+
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
tree stmt = bsi_stmt (si);
int stmt_region;
- stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
- if (stmt_region > 0
- && (region < 0 || eh_region_outer_p (src_cfun, stmt_region, region)))
- region = stmt_region;
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
+ else
+ stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
+ if (stmt_region > 0)
+ {
+ if (region < 0)
+ region = stmt_region;
+ else if (stmt_region != region)
+ {
+ region = eh_region_outermost (src_cfun, stmt_region, region);
+ gcc_assert (region != -1);
+ }
+ }
}
return region;
m = xmalloc (sizeof (struct tree_map));
m->hash = DECL_UID (decl);
- m->from = decl;
+ m->base.from = decl;
m->to = create_artificial_label ();
LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
/* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
region. */
gcc_assert (entry_bb != exit_bb
- && dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb));
+ && (!exit_bb
+ || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
bbs = NULL;
VEC_safe_push (basic_block, heap, bbs, entry_bb);
remove_edge (e);
}
- num_exit_edges = EDGE_COUNT (exit_bb->succs);
- exit_succ = (basic_block *) xcalloc (num_exit_edges, sizeof (basic_block));
- exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
- i = 0;
- for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
+ if (exit_bb)
{
- exit_flag[i] = e->flags;
- exit_succ[i++] = e->dest;
- remove_edge (e);
+ num_exit_edges = EDGE_COUNT (exit_bb->succs);
+ exit_succ = (basic_block *) xcalloc (num_exit_edges,
+ sizeof (basic_block));
+ exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
+ i = 0;
+ for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
+ {
+ exit_flag[i] = e->flags;
+ exit_succ[i++] = e->dest;
+ remove_edge (e);
+ }
+ }
+ else
+ {
+ num_exit_edges = 0;
+ exit_succ = NULL;
+ exit_flag = NULL;
}
/* Switch context to the child function to initialize DEST_FN's CFG. */
these helpers. */
cfun = dest_cfun;
make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
- make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
+ if (exit_bb)
+ make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
cfun = saved_cfun;
/* Back in the original function, the SESE region has disappeared,
for (i = 0; i < num_exit_edges; i++)
make_edge (bb, exit_succ[i], exit_flag[i]);
- free (exit_flag);
+ if (exit_bb)
+ {
+ free (exit_flag);
+ free (exit_succ);
+ }
free (entry_flag);
free (entry_pred);
- free (exit_succ);
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
VEC_free (basic_block, heap, bbs);
dump_function_to_file (tree fn, FILE *file, int flags)
{
tree arg, vars, var;
+ struct function *dsf;
bool ignore_topmost_bind = false, any_var = false;
basic_block bb;
tree chain;
struct function *saved_cfun;
-
+
fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
arg = DECL_ARGUMENTS (fn);
}
fprintf (file, ")\n");
- if (flags & TDF_DETAILS)
- dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
+ dsf = DECL_STRUCT_FUNCTION (fn);
+ if (dsf && (flags & TDF_DETAILS))
+ dump_eh_tree (file, dsf);
+
if (flags & TDF_RAW)
{
dump_node (fn, TDF_SLIM | flags, file);
FOR_EACH_BB (bb)
dump_generic_bb (file, bb, 2, flags);
-
+
fprintf (file, "}\n");
check_bb_profile (EXIT_BLOCK_PTR, file);
}
{
char *s_indent;
basic_block bb;
-
+
if (loop == NULL)
return;
/* Print the loop's header. */
fprintf (file, "%sloop_%d\n", s_indent, loop->num);
-
+
/* Print the loop's body. */
fprintf (file, "%s{\n", s_indent);
FOR_EACH_BB (bb)
fprintf (file, "}, succs = {");
print_succ_bbs (file, bb);
fprintf (file, "})\n");
-
+
/* Print the basic_block's body. */
fprintf (file, "%s {\n", s_indent);
tree_dump_bb (bb, file, indent + 4);
fprintf (file, "%s }\n", s_indent);
}
-
+
print_loop (file, loop->inner, indent + 2);
fprintf (file, "%s}\n", s_indent);
print_loop (file, loop->next, indent);
/* Follow a CFG edge from the entry point of the program, and on entry
of a loop, pretty print the loop structure on FILE. */
-void
+void
print_loop_ir (FILE *file)
{
basic_block bb;
-
+
bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
if (bb && bb->loop_father)
print_loop (file, bb->loop_father, 0);
/* Debugging loops structure at tree level. */
-void
+void
debug_loop_ir (void)
{
print_loop_ir (stderr);
return blocks_split;
}
+/* Purge dead abnormal call edges from basic block BB. */
+
+bool
+tree_purge_dead_abnormal_call_edges (basic_block bb)
+{
+ bool changed = tree_purge_dead_eh_edges (bb);
+
+ if (current_function_has_nonlocal_label)
+ {
+ tree stmt = last_stmt (bb);
+ edge_iterator ei;
+ edge e;
+
+ if (!(stmt && tree_can_make_abnormal_goto (stmt)))
+ for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
+ {
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ remove_edge (e);
+ changed = true;
+ }
+ else
+ ei_next (&ei);
+ }
+
+ /* See tree_purge_dead_eh_edges below. */
+ if (changed)
+ free_dominance_info (CDI_DOMINATORS);
+ }
+
+ return changed;
+}
+
+/* Stores all basic blocks dominated by BB to DOM_BBS. */
+
+static void
+get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
+{
+ basic_block son;
+
+ VEC_safe_push (basic_block, heap, *dom_bbs, bb);
+ for (son = first_dom_son (CDI_DOMINATORS, bb);
+ son;
+ son = next_dom_son (CDI_DOMINATORS, son))
+ get_all_dominated_blocks (son, dom_bbs);
+}
+
+/* Removes edge E and all the blocks dominated by it, and updates dominance
+ information. The IL in E->src needs to be updated separately.
+ If dominance info is not available, only the edge E is removed. */
+
+void
+remove_edge_and_dominated_blocks (edge e)
+{
+ VEC (basic_block, heap) *bbs_to_remove = NULL;
+ VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
+ bitmap df, df_idom;
+ edge f;
+ edge_iterator ei;
+ bool none_removed = false;
+ unsigned i;
+ basic_block bb, dbb;
+ bitmap_iterator bi;
+
+ if (!dom_info_available_p (CDI_DOMINATORS))
+ {
+ remove_edge (e);
+ return;
+ }
+
+ /* No updating is needed for edges to exit. */
+ if (e->dest == EXIT_BLOCK_PTR)
+ {
+ if (cfgcleanup_altered_bbs)
+ bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
+ remove_edge (e);
+ return;
+ }
+
+ /* First, we find the basic blocks to remove. If E->dest has a predecessor
+ that is not dominated by E->dest, then this set is empty. Otherwise,
+ all the basic blocks dominated by E->dest are removed.
+
+ Also, to DF_IDOM we store the immediate dominators of the blocks in
+ the dominance frontier of E (i.e., of the successors of the
+ removed blocks, if there are any, and of E->dest otherwise). */
+ FOR_EACH_EDGE (f, ei, e->dest->preds)
+ {
+ if (f == e)
+ continue;
+
+ if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
+ {
+ none_removed = true;
+ break;
+ }
+ }
+
+ df = BITMAP_ALLOC (NULL);
+ df_idom = BITMAP_ALLOC (NULL);
+
+ if (none_removed)
+ bitmap_set_bit (df_idom,
+ get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
+ else
+ {
+ get_all_dominated_blocks (e->dest, &bbs_to_remove);
+ for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
+ {
+ FOR_EACH_EDGE (f, ei, bb->succs)
+ {
+ if (f->dest != EXIT_BLOCK_PTR)
+ bitmap_set_bit (df, f->dest->index);
+ }
+ }
+ for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
+ bitmap_clear_bit (df, bb->index);
+
+ EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
+ {
+ bb = BASIC_BLOCK (i);
+ bitmap_set_bit (df_idom,
+ get_immediate_dominator (CDI_DOMINATORS, bb)->index);
+ }
+ }
+
+ if (cfgcleanup_altered_bbs)
+ {
+ /* Record the set of the altered basic blocks. */
+ bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
+ bitmap_ior_into (cfgcleanup_altered_bbs, df);
+ }
+
+ /* Remove E and the cancelled blocks. */
+ if (none_removed)
+ remove_edge (e);
+ else
+ {
+ for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
+ delete_basic_block (bb);
+ }
+
+ /* Update the dominance information. The immediate dominator may change only
+ for blocks whose immediate dominator belongs to DF_IDOM:
+
+ Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
+ removal. Let Z be an arbitrary block such that idom(Z) = Y and
+ Z dominates X after the removal. Before removal, there exists a path P
+ from Y to X that avoids Z. Let F be the last edge on P that is
+ removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
+ dominates W, and because of P, Z does not dominate W), and W belongs to
+ the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
+ EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
+ {
+ bb = BASIC_BLOCK (i);
+ for (dbb = first_dom_son (CDI_DOMINATORS, bb);
+ dbb;
+ dbb = next_dom_son (CDI_DOMINATORS, dbb))
+ VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
+ }
+
+ iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
+
+ BITMAP_FREE (df);
+ BITMAP_FREE (df_idom);
+ VEC_free (basic_block, heap, bbs_to_remove);
+ VEC_free (basic_block, heap, bbs_to_fix_dom);
+}
+
+/* Purge dead EH edges from basic block BB. */
+
bool
tree_purge_dead_eh_edges (basic_block bb)
{
{
if (e->flags & EDGE_EH)
{
- remove_edge (e);
+ remove_edge_and_dominated_blocks (e);
changed = true;
}
else
ei_next (&ei);
}
- /* Removal of dead EH edges might change dominators of not
- just immediate successors. E.g. when bb1 is changed so that
- it no longer can throw and bb1->bb3 and bb1->bb4 are dead
- eh edges purged by this function in:
- 0
- / \
- v v
- 1-->2
- / \ |
- v v |
- 3-->4 |
- \ v
- --->5
- |
- -
- idom(bb5) must be recomputed. For now just free the dominance
- info. */
- if (changed)
- free_dominance_info (CDI_DOMINATORS);
-
return changed;
}
of 'first'. Both of them are dominated by 'new_head' basic block. When
'new_head' was created by 'second's incoming edge it received phi arguments
on the edge by split_edge(). Later, additional edge 'e' was created to
- connect 'new_head' and 'first'. Now this routine adds phi args on this
- additional edge 'e' that new_head to second edge received as part of edge
+ connect 'new_head' and 'first'. Now this routine adds phi args on this
+ additional edge 'e' that new_head to second edge received as part of edge
splitting.
*/
/* Browse all 'second' basic block phi nodes and add phi args to
edge 'e' for 'first' head. PHI args are always in correct order. */
- for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
- phi2 && phi1;
+ for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
+ phi2 && phi1;
phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
{
tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
}
}
-/* Adds a if else statement to COND_BB with condition COND_EXPR.
- SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
+/* Adds an if-else statement to COND_BB with condition COND_EXPR.
+ SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
the destination of the ELSE part. */
static void
-tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
- basic_block cond_bb, void *cond_e)
+tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
+ basic_block second_head ATTRIBUTE_UNUSED,
+ basic_block cond_bb, void *cond_e)
{
block_stmt_iterator bsi;
- tree goto1 = NULL_TREE;
- tree goto2 = NULL_TREE;
tree new_cond_expr = NULL_TREE;
tree cond_expr = (tree) cond_e;
edge e0;
/* Build new conditional expr */
- goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
- goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
- new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
+ new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
+ NULL_TREE, NULL_TREE);
- /* Add new cond in cond_bb. */
- bsi = bsi_start (cond_bb);
+ /* Add new cond in cond_bb. */
+ bsi = bsi_start (cond_bb);
bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
/* Adjust edges appropriately to connect new head with first head
as well as second head. */
create_bb, /* create_basic_block */
tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
+ tree_can_remove_branch_p, /* can_remove_branch_p */
remove_bb, /* delete_basic_block */
tree_split_block, /* split_block */
tree_move_block_after, /* move_block_after */
tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
extract_true_false_edges_from_block, /* extract_cond_bb_edges */
- flush_pending_stmts /* flush_pending_stmts */
+ flush_pending_stmts /* flush_pending_stmts */
};
return 0;
}
-struct tree_opt_pass pass_split_crit_edges =
+struct tree_opt_pass pass_split_crit_edges =
{
"crited", /* name */
NULL, /* gate */
return exp;
t = make_rename_temp (type, NULL);
- new_stmt = build2 (MODIFY_EXPR, type, t, exp);
+ new_stmt = build_gimple_modify_stmt (t, exp);
orig_stmt = bsi_stmt (*bsi);
SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
+ if (gimple_in_ssa_p (cfun))
+ mark_symbols_for_renaming (new_stmt);
return t;
}