#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
+#include "tree-inline.h"
/* This file contains functions for building the Control Flow Graph (CFG)
for a function tree. */
static unsigned int split_critical_edges (void);
/* Various helpers. */
-static inline bool stmt_starts_bb_p (tree, tree);
+static inline bool stmt_starts_bb_p (const_tree, const_tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static void tree_cfg2vcg (FILE *);
case OMP_SECTIONS:
cur_region = new_omp_region (bb, code, cur_region);
+ fallthru = true;
+ break;
+
+ case OMP_SECTIONS_SWITCH:
fallthru = false;
break;
switch (cur_region->type)
{
case OMP_FOR:
- /* ??? Technically there should be a some sort of loopback
- edge here, but it goes to a block that doesn't exist yet,
- and without it, updating the ssa form would be a real
- bear. Fortunately, we don't yet do ssa before expanding
- these nodes. */
+ /* Make the loopback edge. */
+ make_edge (bb, single_succ (cur_region->entry), 0);
+
+ /* Create an edge from OMP_FOR to exit, which corresponds to
+ the case that the body of the loop is not executed at
+ all. */
+ make_edge (cur_region->entry, bb->next_bb, 0);
+ fallthru = true;
break;
case OMP_SECTIONS:
/* Wire up the edges into and out of the nested sections. */
- /* ??? Similarly wrt loopback. */
{
+ basic_block switch_bb = single_succ (cur_region->entry);
+
struct omp_region *i;
for (i = cur_region->inner; i ; i = i->next)
{
gcc_assert (i->type == OMP_SECTION);
- make_edge (cur_region->entry, i->entry, 0);
+ make_edge (switch_bb, i->entry, 0);
make_edge (i->exit, bb, EDGE_FALLTHRU);
}
+
+ /* Make the loopback edge to the block with
+ OMP_SECTIONS_SWITCH. */
+ make_edge (bb, switch_bb, 0);
+
+ /* Make the edge from the switch to exit. */
+ make_edge (switch_bb, bb->next_bb, 0);
+ fallthru = false;
}
break;
default:
gcc_unreachable ();
}
- fallthru = true;
break;
default:
element. */
static bool
-edge_to_cases_cleanup (void *key ATTRIBUTE_UNUSED, void **value,
+edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
void *data ATTRIBUTE_UNUSED)
{
tree t, next;
static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
- tree stmt;
+ const_tree stmt;
block_stmt_iterator bsi;
tree phi;
/* If A ends by a statement causing exceptions or something similar, we
cannot merge the blocks. */
- stmt = last_stmt (a);
+  /* This CONST_CAST is okay because last_stmt doesn't modify its
+     argument and the return value is assigned to a const_tree.  */
+ stmt = last_stmt (CONST_CAST_BB (a));
if (stmt && stmt_ends_bb_p (stmt))
return false;
tree copy;
bool may_replace_uses = may_propagate_copy (def, use);
- /* In case we have loops to care about, do not propagate arguments of
- loop closed ssa phi nodes. */
+ /* In case we maintain loop closed ssa form, do not propagate arguments
+ of loop exit phi nodes. */
if (current_loops
+ && loops_state_satisfies_p (LOOP_CLOSED_SSA)
&& is_gimple_reg (def)
&& TREE_CODE (use) == SSA_NAME
&& a->loop_father != b->loop_father)
loop above, so the last statement we process is the first statement
in the block. */
#ifdef USE_MAPPED_LOCATION
- if (loc > BUILTINS_LOCATION)
+ if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
#else
if (loc)
/* Return true if T represents a stmt that always transfers control. */
bool
-is_ctrl_stmt (tree t)
+is_ctrl_stmt (const_tree t)
{
return (TREE_CODE (t) == COND_EXPR
|| TREE_CODE (t) == SWITCH_EXPR
(e.g., a call to a non-returning function). */
bool
-is_ctrl_altering_stmt (tree t)
+is_ctrl_altering_stmt (const_tree t)
{
- tree call;
+ const_tree call;
gcc_assert (t);
- call = get_call_expr_in (t);
+ call = get_call_expr_in (CONST_CAST_TREE (t));
if (call)
{
/* A non-pure/const CALL_EXPR alters flow control if the current
/* Return true if T is a computed goto. */
bool
-computed_goto_p (tree t)
+computed_goto_p (const_tree t)
{
return (TREE_CODE (t) == GOTO_EXPR
&& TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
/* Return true if T is a simple local goto. */
bool
-simple_goto_p (tree t)
+simple_goto_p (const_tree t)
{
return (TREE_CODE (t) == GOTO_EXPR
&& TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
Transfers of control flow associated with EH are excluded. */
bool
-tree_can_make_abnormal_goto (tree t)
+tree_can_make_abnormal_goto (const_tree t)
{
if (computed_goto_p (t))
return true;
unnecessary basic blocks that only contain a single label. */
static inline bool
-stmt_starts_bb_p (tree t, tree prev_t)
+stmt_starts_bb_p (const_tree t, const_tree prev_t)
{
if (t == NULL_TREE)
return false;
/* Return true if T should end a basic block. */
bool
-stmt_ends_bb_p (tree t)
+stmt_ends_bb_p (const_tree t)
{
return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
}
-
/* Return the last statement in basic block BB. */
tree
return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
}
-
/* Return the last statement of an otherwise empty block. Return NULL
if the block is totally empty, or if it contains more than one
statement. */
#undef CHECK_OP
}
-
-/* Verify STMT, return true if STMT is not in GIMPLE form.
- TODO: Implement type checking. */
+/* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
+ if there is an error, otherwise false. */
static bool
-verify_stmt (tree stmt, bool last_in_block)
+verify_gimple_unary_expr (const_tree expr)
{
- tree addr;
+ tree op = TREE_OPERAND (expr, 0);
+ tree type = TREE_TYPE (expr);
- if (OMP_DIRECTIVE_P (stmt))
+ if (!is_gimple_val (op))
{
- /* OpenMP directives are validated by the FE and never operated
- on by the optimizers. Furthermore, OMP_FOR may contain
- non-gimple expressions when the main index variable has had
- its address taken. This does not affect the loop itself
- because the header of an OMP_FOR is merely used to determine
- how to setup the parallel iteration. */
- return false;
+ error ("invalid operand in unary expression");
+ return true;
}
- if (!is_gimple_stmt (stmt))
+ /* For general unary expressions we have the operations type
+ as the effective type the operation is carried out on. So all
+ we need to require is that the operand is trivially convertible
+ to that type. */
+ if (!useless_type_conversion_p (type, TREE_TYPE (op)))
{
- error ("is not a valid GIMPLE statement");
- goto fail;
+ error ("type mismatch in unary expression");
+ debug_generic_expr (type);
+ debug_generic_expr (TREE_TYPE (op));
+ return true;
}
- addr = walk_tree (&stmt, verify_expr, NULL, NULL);
- if (addr)
+ return false;
+}
+
+/* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
+ if there is an error, otherwise false. */
+
+static bool
+verify_gimple_binary_expr (const_tree expr)
+{
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+ tree type = TREE_TYPE (expr);
+
+ if (!is_gimple_val (op0) || !is_gimple_val (op1))
{
- debug_generic_stmt (addr);
+ error ("invalid operands in binary expression");
return true;
}
- /* If the statement is marked as part of an EH region, then it is
- expected that the statement could throw. Verify that when we
- have optimizations that simplify statements such that we prove
- that they cannot throw, that we update other data structures
- to match. */
- if (lookup_stmt_eh_region (stmt) >= 0)
+ /* For general binary expressions we have the operations type
+ as the effective type the operation is carried out on. So all
+ we need to require is that both operands are trivially convertible
+ to that type. */
+ if (!useless_type_conversion_p (type, TREE_TYPE (op0))
+ || !useless_type_conversion_p (type, TREE_TYPE (op1)))
{
- if (!tree_could_throw_p (stmt))
- {
- error ("statement marked for throw, but doesn%'t");
- goto fail;
- }
- if (!last_in_block && tree_can_throw_internal (stmt))
- {
- error ("statement marked for throw in middle of block");
- goto fail;
- }
+ error ("type mismatch in binary expression");
+ debug_generic_stmt (type);
+ debug_generic_stmt (TREE_TYPE (op0));
+ debug_generic_stmt (TREE_TYPE (op1));
+ return true;
}
return false;
-
- fail:
- debug_generic_stmt (stmt);
- return true;
}
-
-/* Return true when the T can be shared. */
+/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
+ Returns true if there is an error, otherwise false. */
static bool
-tree_node_can_be_shared (tree t)
+verify_gimple_min_lval (tree expr)
{
- if (IS_TYPE_OR_DECL_P (t)
- || is_gimple_min_invariant (t)
- || TREE_CODE (t) == SSA_NAME
- || t == error_mark_node
- || TREE_CODE (t) == IDENTIFIER_NODE)
- return true;
+ tree op;
- if (TREE_CODE (t) == CASE_LABEL_EXPR)
- return true;
+ if (is_gimple_id (expr))
+ return false;
- while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
- && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
- || TREE_CODE (t) == COMPONENT_REF
- || TREE_CODE (t) == REALPART_EXPR
- || TREE_CODE (t) == IMAGPART_EXPR)
- t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (expr) != INDIRECT_REF
+ && TREE_CODE (expr) != ALIGN_INDIRECT_REF
+ && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
+ {
+ error ("invalid expression for min lvalue");
+ return true;
+ }
- if (DECL_P (t))
- return true;
+ op = TREE_OPERAND (expr, 0);
+ if (!is_gimple_val (op))
+ {
+ error ("invalid operand in indirect reference");
+ debug_generic_stmt (op);
+ return true;
+ }
+ if (!useless_type_conversion_p (TREE_TYPE (expr),
+ TREE_TYPE (TREE_TYPE (op))))
+ {
+ error ("type mismatch in indirect reference");
+ debug_generic_stmt (TREE_TYPE (expr));
+ debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
+ return true;
+ }
return false;
}
+/* Verify if EXPR is a valid GIMPLE reference expression. Returns true
+ if there is an error, otherwise false. */
-/* Called via walk_trees. Verify tree sharing. */
-
-static tree
-verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
+static bool
+verify_gimple_reference (tree expr)
{
- struct pointer_set_t *visited = (struct pointer_set_t *) data;
-
- if (tree_node_can_be_shared (*tp))
+ while (handled_component_p (expr))
{
- *walk_subtrees = false;
- return NULL;
- }
+ tree op = TREE_OPERAND (expr, 0);
- if (pointer_set_insert (visited, *tp))
- return *tp;
+ if (TREE_CODE (expr) == ARRAY_REF
+ || TREE_CODE (expr) == ARRAY_RANGE_REF)
+ {
+ if (!is_gimple_val (TREE_OPERAND (expr, 1))
+ || (TREE_OPERAND (expr, 2)
+ && !is_gimple_val (TREE_OPERAND (expr, 2)))
+ || (TREE_OPERAND (expr, 3)
+ && !is_gimple_val (TREE_OPERAND (expr, 3))))
+ {
+ error ("invalid operands to array reference");
+ debug_generic_stmt (expr);
+ return true;
+ }
+ }
- return NULL;
-}
+ /* Verify if the reference array element types are compatible. */
+ if (TREE_CODE (expr) == ARRAY_REF
+ && !useless_type_conversion_p (TREE_TYPE (expr),
+ TREE_TYPE (TREE_TYPE (op))))
+ {
+ error ("type mismatch in array reference");
+ debug_generic_stmt (TREE_TYPE (expr));
+ debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
+ return true;
+ }
+ if (TREE_CODE (expr) == ARRAY_RANGE_REF
+ && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
+ TREE_TYPE (TREE_TYPE (op))))
+ {
+ error ("type mismatch in array range reference");
+ debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
+ debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
+ return true;
+ }
+ if ((TREE_CODE (expr) == REALPART_EXPR
+ || TREE_CODE (expr) == IMAGPART_EXPR)
+ && !useless_type_conversion_p (TREE_TYPE (expr),
+ TREE_TYPE (TREE_TYPE (op))))
+ {
+ error ("type mismatch in real/imagpart reference");
+ debug_generic_stmt (TREE_TYPE (expr));
+ debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
+ return true;
+ }
-/* Helper function for verify_gimple_tuples. */
+ if (TREE_CODE (expr) == COMPONENT_REF
+ && !useless_type_conversion_p (TREE_TYPE (expr),
+ TREE_TYPE (TREE_OPERAND (expr, 1))))
+ {
+ error ("type mismatch in component reference");
+ debug_generic_stmt (TREE_TYPE (expr));
+ debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
+ return true;
+ }
-static tree
-verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data ATTRIBUTE_UNUSED)
-{
- switch (TREE_CODE (*tp))
- {
- case MODIFY_EXPR:
- error ("unexpected non-tuple");
- debug_tree (*tp);
- gcc_unreachable ();
- return NULL_TREE;
+ /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
+ is nothing to verify. Gross mismatches at most invoke
+ undefined behavior. */
- default:
- return NULL_TREE;
+ expr = op;
}
+
+ return verify_gimple_min_lval (expr);
}
-/* Verify that there are no trees that should have been converted to
- gimple tuples. Return true if T contains a node that should have
- been converted to a gimple tuple, but hasn't. */
+/* Verify the GIMPLE expression EXPR. Returns true if there is an
+ error, otherwise false. */
static bool
-verify_gimple_tuples (tree t)
+verify_gimple_expr (tree expr)
{
- return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
-}
+ tree type = TREE_TYPE (expr);
-static bool eh_error_found;
-static int
-verify_eh_throw_stmt_node (void **slot, void *data)
-{
- struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
- struct pointer_set_t *visited = (struct pointer_set_t *) data;
+ if (is_gimple_val (expr))
+ return false;
- if (!pointer_set_contains (visited, node->stmt))
+ /* Special codes we cannot handle via their class. */
+ switch (TREE_CODE (expr))
{
- error ("Dead STMT in EH table");
- debug_generic_stmt (node->stmt);
- eh_error_found = true;
- }
- return 0;
-}
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!is_gimple_val (op))
+ {
+ error ("invalid operand in conversion");
+ return true;
+ }
-/* Verify the GIMPLE statement chain. */
+ /* Allow conversions between integral types and between
+ pointer types. */
+ if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
+ || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
+ return false;
-void
-verify_stmts (void)
-{
- basic_block bb;
- block_stmt_iterator bsi;
- bool err = false;
- struct pointer_set_t *visited, *visited_stmts;
- tree addr;
+ /* Allow conversions between integral types and pointers only if
+ there is no sign or zero extension involved. */
+ if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
+ || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
+ && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
+ return false;
- timevar_push (TV_TREE_STMT_VERIFY);
- visited = pointer_set_create ();
- visited_stmts = pointer_set_create ();
+ /* Allow conversion from integer to offset type and vice versa. */
+ if ((TREE_CODE (type) == OFFSET_TYPE
+ && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
+ return false;
- FOR_EACH_BB (bb)
- {
- tree phi;
- int i;
+ /* Otherwise assert we are converting between types of the
+ same kind. */
+ if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
+ {
+ error ("invalid types in nop conversion");
+ debug_generic_expr (type);
+ debug_generic_expr (TREE_TYPE (op));
+ return true;
+ }
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- {
- int phi_num_args = PHI_NUM_ARGS (phi);
+ return false;
+ }
- pointer_set_insert (visited_stmts, phi);
- if (bb_for_stmt (phi) != bb)
- {
- error ("bb_for_stmt (phi) is set to a wrong basic block");
- err |= true;
- }
+ case FLOAT_EXPR:
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!is_gimple_val (op))
+ {
+ error ("invalid operand in int to float conversion");
+ return true;
+ }
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
+ || !SCALAR_FLOAT_TYPE_P (type))
+ {
+ error ("invalid types in conversion to floating point");
+ debug_generic_expr (type);
+ debug_generic_expr (TREE_TYPE (op));
+ return true;
+ }
+ return false;
+ }
- for (i = 0; i < phi_num_args; i++)
- {
- tree t = PHI_ARG_DEF (phi, i);
- tree addr;
+ case FIX_TRUNC_EXPR:
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!is_gimple_val (op))
+ {
+ error ("invalid operand in float to int conversion");
+ return true;
+ }
+ if (!INTEGRAL_TYPE_P (type)
+ || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
+ {
+ error ("invalid types in conversion to integer");
+ debug_generic_expr (type);
+ debug_generic_expr (TREE_TYPE (op));
+ return true;
+ }
+ return false;
+ }
- /* Addressable variables do have SSA_NAMEs but they
- are not considered gimple values. */
- if (TREE_CODE (t) != SSA_NAME
- && TREE_CODE (t) != FUNCTION_DECL
- && !is_gimple_val (t))
- {
- error ("PHI def is not a GIMPLE value");
- debug_generic_stmt (phi);
- debug_generic_stmt (t);
- err |= true;
- }
+    case COMPLEX_EXPR:
+      {
+	tree op0 = TREE_OPERAND (expr, 0);
+	tree op1 = TREE_OPERAND (expr, 1);
+	if (!is_gimple_val (op0) || !is_gimple_val (op1))
+	  {
+	    error ("invalid operands in complex expression");
+	    return true;
+	  }
+	/* The result must be a complex type whose element type both
+	   operands are trivially convertible to, and the operands must
+	   be integral or scalar float.  Note the comparison is written
+	   CODE != X; the previous "!CODE == X" negated the code first
+	   and compared 0/1 against X, so the check never fired.  */
+	if (TREE_CODE (type) != COMPLEX_TYPE
+	    || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
+		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
+	    || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
+		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
+	    || !useless_type_conversion_p (TREE_TYPE (type),
+					   TREE_TYPE (op0))
+	    || !useless_type_conversion_p (TREE_TYPE (type),
+					   TREE_TYPE (op1)))
+	  {
+	    error ("type mismatch in complex expression");
+	    debug_generic_stmt (TREE_TYPE (expr));
+	    debug_generic_stmt (TREE_TYPE (op0));
+	    debug_generic_stmt (TREE_TYPE (op1));
+	    return true;
+	  }
+	return false;
+      }
- addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
- if (addr)
- {
- debug_generic_stmt (addr);
- err |= true;
- }
+ case CONSTRUCTOR:
+ {
+ /* This is used like COMPLEX_EXPR but for vectors. */
+ if (TREE_CODE (type) != VECTOR_TYPE)
+ {
+ error ("constructor not allowed for non-vector types");
+ debug_generic_stmt (type);
+ return true;
+ }
+ /* FIXME: verify constructor arguments. */
+ return false;
+ }
- addr = walk_tree (&t, verify_node_sharing, visited, NULL);
- if (addr)
- {
- error ("incorrect sharing of tree nodes");
- debug_generic_stmt (phi);
- debug_generic_stmt (addr);
+    case LSHIFT_EXPR:
+    case RSHIFT_EXPR:
+    case LROTATE_EXPR:
+    case RROTATE_EXPR:
+      {
+	tree op0 = TREE_OPERAND (expr, 0);
+	tree op1 = TREE_OPERAND (expr, 1);
+	if (!is_gimple_val (op0) || !is_gimple_val (op1))
+	  {
+	    error ("invalid operands in shift expression");
+	    return true;
+	  }
+	/* The shift count must have integer type and the shifted operand
+	   must be trivially convertible to the result type.  Written as
+	   CODE != X; the previous "!CODE == X" compared the negation of
+	   the code against X, so the check never fired.  */
+	if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
+	    || !useless_type_conversion_p (type, TREE_TYPE (op0)))
+	  {
+	    error ("type mismatch in shift expression");
+	    debug_generic_stmt (TREE_TYPE (expr));
+	    debug_generic_stmt (TREE_TYPE (op0));
+	    debug_generic_stmt (TREE_TYPE (op1));
+	    return true;
+	  }
+	return false;
+      }
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+ if (POINTER_TYPE_P (type)
+ || POINTER_TYPE_P (TREE_TYPE (op0))
+ || POINTER_TYPE_P (TREE_TYPE (op1)))
+ {
+ error ("invalid (pointer) operands to plus/minus");
+ return true;
+ }
+ /* Continue with generic binary expression handling. */
+ break;
+ }
+
+ case POINTER_PLUS_EXPR:
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+ if (!is_gimple_val (op0) || !is_gimple_val (op1))
+ {
+ error ("invalid operands in pointer plus expression");
+ return true;
+ }
+ if (!POINTER_TYPE_P (TREE_TYPE (op0))
+ || TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE
+ || !useless_type_conversion_p (type, TREE_TYPE (op0))
+ || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
+ {
+ error ("type mismatch in pointer plus expression");
+ debug_generic_stmt (type);
+ debug_generic_stmt (TREE_TYPE (op0));
+ debug_generic_stmt (TREE_TYPE (op1));
+ return true;
+ }
+ return false;
+ }
+
+ case COND_EXPR:
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+ tree op2 = TREE_OPERAND (expr, 2);
+ if ((!is_gimple_val (op1)
+ && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
+ || (!is_gimple_val (op2)
+ && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
+ {
+ error ("invalid operands in conditional expression");
+ return true;
+ }
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
+ && !useless_type_conversion_p (type, TREE_TYPE (op1)))
+ || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
+ && !useless_type_conversion_p (type, TREE_TYPE (op2))))
+ {
+ error ("type mismatch in conditional expression");
+ debug_generic_stmt (type);
+ debug_generic_stmt (TREE_TYPE (op0));
+ debug_generic_stmt (TREE_TYPE (op1));
+ debug_generic_stmt (TREE_TYPE (op2));
+ return true;
+ }
+ return verify_gimple_expr (op0);
+ }
+
+ case ADDR_EXPR:
+ {
+ tree op = TREE_OPERAND (expr, 0);
+ if (!is_gimple_addressable (op))
+ {
+ error ("invalid operand in unary expression");
+ return true;
+ }
+ if (TYPE_POINTER_TO (TREE_TYPE (op))
+ && !useless_type_conversion_p (type,
+ TYPE_POINTER_TO (TREE_TYPE (op)))
+ /* FIXME: a longstanding wart, &a == &a[0]. */
+ && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
+ || (TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (op)))
+ && !useless_type_conversion_p (type,
+ TYPE_POINTER_TO (TREE_TYPE (TREE_TYPE (op)))))))
+ {
+ error ("type mismatch in address expression");
+ debug_generic_stmt (TREE_TYPE (expr));
+ debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
+ return true;
+ }
+
+ return verify_gimple_reference (op);
+ }
+
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+
+ if (!is_gimple_val (op0) || !is_gimple_val (op1))
+ {
+ error ("invalid operands in truth expression");
+ return true;
+ }
+
+ /* We allow any kind of integral typed argument and result. */
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
+ || !INTEGRAL_TYPE_P (type))
+ {
+ error ("type mismatch in binary truth expression");
+ debug_generic_stmt (type);
+ debug_generic_stmt (TREE_TYPE (op0));
+ debug_generic_stmt (TREE_TYPE (op1));
+ return true;
+ }
+
+ return false;
+ }
+
+ case TRUTH_NOT_EXPR:
+ {
+ tree op = TREE_OPERAND (expr, 0);
+
+ if (!is_gimple_val (op))
+ {
+ error ("invalid operand in unary not");
+ return true;
+ }
+
+ /* For TRUTH_NOT_EXPR we can have any kind of integral
+ typed arguments and results. */
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
+ || !INTEGRAL_TYPE_P (type))
+ {
+ error ("type mismatch in not expression");
+ debug_generic_expr (TREE_TYPE (expr));
+ debug_generic_expr (TREE_TYPE (op));
+ return true;
+ }
+
+ return false;
+ }
+
+ case CALL_EXPR:
+ /* FIXME. The C frontend passes unpromoted arguments in case it
+ didn't see a function declaration before the call. */
+ return false;
+
+ default:;
+ }
+
+ /* Generic handling via classes. */
+ switch (TREE_CODE_CLASS (TREE_CODE (expr)))
+ {
+ case tcc_unary:
+ return verify_gimple_unary_expr (expr);
+
+ case tcc_binary:
+ return verify_gimple_binary_expr (expr);
+
+ case tcc_reference:
+ return verify_gimple_reference (expr);
+
+ case tcc_comparison:
+ {
+ tree op0 = TREE_OPERAND (expr, 0);
+ tree op1 = TREE_OPERAND (expr, 1);
+ if (!is_gimple_val (op0) || !is_gimple_val (op1))
+ {
+ error ("invalid operands in comparison expression");
+ return true;
+ }
+ /* For comparisons we do not have the operations type as the
+ effective type the comparison is carried out in. Instead
+ we require that either the first operand is trivially
+ convertible into the second, or the other way around.
+ The resulting type of a comparison may be any integral type.
+ Because we special-case pointers to void we allow
+ comparisons of pointers with the same mode as well. */
+ if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
+ && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
+ && (!POINTER_TYPE_P (TREE_TYPE (op0))
+ || !POINTER_TYPE_P (TREE_TYPE (op1))
+ || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
+ || !INTEGRAL_TYPE_P (type))
+ {
+ error ("type mismatch in comparison expression");
+ debug_generic_stmt (TREE_TYPE (expr));
+ debug_generic_stmt (TREE_TYPE (op0));
+ debug_generic_stmt (TREE_TYPE (op1));
+ return true;
+ }
+ break;
+ }
+
+ default:
+ gcc_unreachable ();
+ }
+
+ return false;
+}
+
+/* Verify the GIMPLE assignment statement STMT. Returns true if there
+ is an error, otherwise false. */
+
+static bool
+verify_gimple_modify_stmt (const_tree stmt)
+{
+ tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+
+ gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+
+ if (!useless_type_conversion_p (TREE_TYPE (lhs),
+ TREE_TYPE (rhs)))
+ {
+ error ("non-trivial conversion at assignment");
+ debug_generic_expr (TREE_TYPE (lhs));
+ debug_generic_expr (TREE_TYPE (rhs));
+ return true;
+ }
+
+ /* Loads/stores from/to a variable are ok. */
+ if ((is_gimple_val (lhs)
+ && is_gimple_variable (rhs))
+ || (is_gimple_val (rhs)
+ && is_gimple_variable (lhs)))
+ return false;
+
+ /* Aggregate copies are ok. */
+ if (!is_gimple_reg_type (TREE_TYPE (lhs))
+ && !is_gimple_reg_type (TREE_TYPE (rhs)))
+ return false;
+
+ /* We might get 'loads' from a parameter which is not a gimple value. */
+ if (TREE_CODE (rhs) == PARM_DECL)
+ return verify_gimple_expr (lhs);
+
+ if (!is_gimple_variable (lhs)
+ && verify_gimple_expr (lhs))
+ return true;
+
+ if (!is_gimple_variable (rhs)
+ && verify_gimple_expr (rhs))
+ return true;
+
+ return false;
+}
+
+/* Verify the GIMPLE statement STMT.  Returns true if there is an
+   error, otherwise false.  */
+
+static bool
+verify_gimple_stmt (tree stmt)
+{
+  if (!is_gimple_stmt (stmt))
+    {
+      error ("is not a valid GIMPLE statement");
+      return true;
+    }
+
+  if (OMP_DIRECTIVE_P (stmt))
+    {
+      /* OpenMP directives are validated by the FE and never operated
+	 on by the optimizers.  Furthermore, OMP_FOR may contain
+	 non-gimple expressions when the main index variable has had
+	 its address taken.  This does not affect the loop itself
+	 because the header of an OMP_FOR is merely used to determine
+	 how to setup the parallel iteration.  */
+      return false;
+    }
+
+  switch (TREE_CODE (stmt))
+    {
+    case GIMPLE_MODIFY_STMT:
+      return verify_gimple_modify_stmt (stmt);
+
+    case GOTO_EXPR:
+    case LABEL_EXPR:
+      return false;
+
+    case SWITCH_EXPR:
+      if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
+	{
+	  error ("invalid operand to switch statement");
+	  debug_generic_expr (TREE_OPERAND (stmt, 0));
+	}
+      /* NOTE(review): the error above is reported but not propagated
+	 (we still return false); consider returning true on the error
+	 path for consistency with the other cases — confirm intent.  */
+      return false;
+
+    case RETURN_EXPR:
+      {
+	tree op = TREE_OPERAND (stmt, 0);
+
+	/* A RETURN_EXPR itself has void type; a typed RETURN_EXPR is
+	   malformed GIMPLE.  */
+	if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
+	  {
+	    error ("type error in return expression");
+	    return true;
+	  }
+
+	/* An empty return or a plain "return <RESULT_DECL>" is fine;
+	   otherwise the operand is an assignment to the result.  */
+	if (op == NULL_TREE
+	    || TREE_CODE (op) == RESULT_DECL)
+	  return false;
+
+	return verify_gimple_modify_stmt (op);
+      }
+
+    case CALL_EXPR:
+    case COND_EXPR:
+      return verify_gimple_expr (stmt);
+
+    case NOP_EXPR:
+    case CHANGE_DYNAMIC_TYPE_EXPR:
+    case ASM_EXPR:
+      return false;
+
+    default:
+      gcc_unreachable ();
+    }
+}
+
+/* Verify the GIMPLE statements inside the statement list STMTS. */
+
+void
+verify_gimple_1 (tree stmts)
+{
+ tree_stmt_iterator tsi;
+
+ for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+ {
+ tree stmt = tsi_stmt (tsi);
+
+ switch (TREE_CODE (stmt))
+ {
+ case BIND_EXPR:
+ verify_gimple_1 (BIND_EXPR_BODY (stmt));
+ break;
+
+ case TRY_CATCH_EXPR:
+ case TRY_FINALLY_EXPR:
+ verify_gimple_1 (TREE_OPERAND (stmt, 0));
+ verify_gimple_1 (TREE_OPERAND (stmt, 1));
+ break;
+
+ case CATCH_EXPR:
+ verify_gimple_1 (CATCH_BODY (stmt));
+ break;
+
+ case EH_FILTER_EXPR:
+ verify_gimple_1 (EH_FILTER_FAILURE (stmt));
+ break;
+
+ default:
+ if (verify_gimple_stmt (stmt))
+ debug_generic_expr (stmt);
+ }
+ }
+}
+
+/* Verify the GIMPLE statements inside the current function. */
+
+void
+verify_gimple (void)
+{
+ verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
+}
+
+/* Verify STMT, return true if STMT is not in GIMPLE form.
+ TODO: Implement type checking. */
+
+static bool
+verify_stmt (tree stmt, bool last_in_block)
+{
+ tree addr;
+
+ if (OMP_DIRECTIVE_P (stmt))
+ {
+ /* OpenMP directives are validated by the FE and never operated
+ on by the optimizers. Furthermore, OMP_FOR may contain
+ non-gimple expressions when the main index variable has had
+ its address taken. This does not affect the loop itself
+ because the header of an OMP_FOR is merely used to determine
+ how to setup the parallel iteration. */
+ return false;
+ }
+
+ if (!is_gimple_stmt (stmt))
+ {
+ error ("is not a valid GIMPLE statement");
+ goto fail;
+ }
+
+ addr = walk_tree (&stmt, verify_expr, NULL, NULL);
+ if (addr)
+ {
+ debug_generic_stmt (addr);
+ return true;
+ }
+
+ /* If the statement is marked as part of an EH region, then it is
+ expected that the statement could throw. Verify that when we
+ have optimizations that simplify statements such that we prove
+ that they cannot throw, that we update other data structures
+ to match. */
+ if (lookup_stmt_eh_region (stmt) >= 0)
+ {
+ if (!tree_could_throw_p (stmt))
+ {
+ error ("statement marked for throw, but doesn%'t");
+ goto fail;
+ }
+ if (!last_in_block && tree_can_throw_internal (stmt))
+ {
+ error ("statement marked for throw in middle of block");
+ goto fail;
+ }
+ }
+
+ return false;
+
+ fail:
+ debug_generic_stmt (stmt);
+ return true;
+}
+
+
+/* Return true when the T can be shared. */
+
+static bool
+tree_node_can_be_shared (tree t)
+{
+ if (IS_TYPE_OR_DECL_P (t)
+ || is_gimple_min_invariant (t)
+ || TREE_CODE (t) == SSA_NAME
+ || t == error_mark_node
+ || TREE_CODE (t) == IDENTIFIER_NODE)
+ return true;
+
+ if (TREE_CODE (t) == CASE_LABEL_EXPR)
+ return true;
+
+ while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
+ && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
+ || TREE_CODE (t) == COMPONENT_REF
+ || TREE_CODE (t) == REALPART_EXPR
+ || TREE_CODE (t) == IMAGPART_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ if (DECL_P (t))
+ return true;
+
+ return false;
+}
+
+
+/* Called via walk_trees. Verify tree sharing. */
+
+static tree
+verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
+{
+ struct pointer_set_t *visited = (struct pointer_set_t *) data;
+
+ if (tree_node_can_be_shared (*tp))
+ {
+ *walk_subtrees = false;
+ return NULL;
+ }
+
+ if (pointer_set_insert (visited, *tp))
+ return *tp;
+
+ return NULL;
+}
+
+
+/* Helper function for verify_gimple_tuples. */
+
+static tree
+verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data ATTRIBUTE_UNUSED)
+{
+ switch (TREE_CODE (*tp))
+ {
+ case MODIFY_EXPR:
+ error ("unexpected non-tuple");
+ debug_tree (*tp);
+ gcc_unreachable ();
+ return NULL_TREE;
+
+ default:
+ return NULL_TREE;
+ }
+}
+
+/* Verify that there are no trees that should have been converted to
+ gimple tuples. Return true if T contains a node that should have
+ been converted to a gimple tuple, but hasn't. */
+
+static bool
+verify_gimple_tuples (tree t)
+{
+ return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
+}
+
+static bool eh_error_found;
+static int
+verify_eh_throw_stmt_node (void **slot, void *data)
+{
+ struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
+ struct pointer_set_t *visited = (struct pointer_set_t *) data;
+
+ if (!pointer_set_contains (visited, node->stmt))
+ {
+ error ("Dead STMT in EH table");
+ debug_generic_stmt (node->stmt);
+ eh_error_found = true;
+ }
+ return 0;
+}
+
+/* Verify the GIMPLE statement chain. */
+
+void
+verify_stmts (void)
+{
+ basic_block bb;
+ block_stmt_iterator bsi;
+ bool err = false;
+ struct pointer_set_t *visited, *visited_stmts;
+ tree addr;
+
+ timevar_push (TV_TREE_STMT_VERIFY);
+ visited = pointer_set_create ();
+ visited_stmts = pointer_set_create ();
+
+ FOR_EACH_BB (bb)
+ {
+ tree phi;
+ int i;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ int phi_num_args = PHI_NUM_ARGS (phi);
+
+ pointer_set_insert (visited_stmts, phi);
+ if (bb_for_stmt (phi) != bb)
+ {
+ error ("bb_for_stmt (phi) is set to a wrong basic block");
+ err |= true;
+ }
+
+ for (i = 0; i < phi_num_args; i++)
+ {
+ tree t = PHI_ARG_DEF (phi, i);
+ tree addr;
+
+ /* Addressable variables do have SSA_NAMEs but they
+ are not considered gimple values. */
+ if (TREE_CODE (t) != SSA_NAME
+ && TREE_CODE (t) != FUNCTION_DECL
+ && !is_gimple_val (t))
+ {
+ error ("PHI def is not a GIMPLE value");
+ debug_generic_stmt (phi);
+ debug_generic_stmt (t);
+ err |= true;
+ }
+
+ addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
+ if (addr)
+ {
+ debug_generic_stmt (addr);
+ err |= true;
+ }
+
+ addr = walk_tree (&t, verify_node_sharing, visited, NULL);
+ if (addr)
+ {
+ error ("incorrect sharing of tree nodes");
+ debug_generic_stmt (phi);
+ debug_generic_stmt (addr);
err |= true;
}
}
e->flags |= EDGE_FALLTHRU;
break;
+ case OMP_RETURN:
+ case OMP_CONTINUE:
+ case OMP_SECTIONS_SWITCH:
+ case OMP_FOR:
+ /* The edges from OMP constructs can be simply redirected. */
+ break;
+
default:
/* Otherwise it must be a fallthru edge, and we don't need to
do anything besides redirecting it. */
it to the destination of the other edge from E->src. */
static bool
-tree_can_remove_branch_p (edge e)
+tree_can_remove_branch_p (const_edge e)
{
if (e->flags & EDGE_ABNORMAL)
return false;
/* Return true if basic_block can be duplicated. */
static bool
-tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
+tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
+ /* Every GIMPLE basic block may be duplicated; BB itself is never
+ inspected, hence the ATTRIBUTE_UNUSED. */
return true;
}
return new_bb;
}
+/* Adds phi node arguments for edge E_COPY after basic block duplication. */
+
+static void
+add_phi_args_after_copy_edge (edge e_copy)
+{
+ basic_block bb, bb_copy = e_copy->src, dest;
+ edge e;
+ edge_iterator ei;
+ tree phi, phi_copy, phi_next, def;
+
+ /* Nothing to do when the destination has no PHI nodes. */
+ if (!phi_nodes (e_copy->dest))
+ return;
+
+ /* Map the copied source block back to its original, if it is a
+ duplicate; otherwise use it as-is. */
+ bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
+
+ if (e_copy->dest->flags & BB_DUPLICATED)
+ dest = get_bb_original (e_copy->dest);
+ else
+ dest = e_copy->dest;
+
+ e = find_edge (bb, dest);
+ if (!e)
+ {
+ /* During loop unrolling the target of the latch edge is copied.
+ In this case we are not looking for edge to dest, but to
+ duplicated block whose original was dest. */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ if ((e->dest->flags & BB_DUPLICATED)
+ && get_bb_original (e->dest) == dest)
+ break;
+ }
+
+ gcc_assert (e != NULL);
+ }
+
+ /* Walk the original and copied PHI chains in lockstep, copying for
+ each PHI the argument that E contributes in the original. */
+ for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
+ phi;
+ phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
+ {
+ phi_next = PHI_CHAIN (phi);
+ def = PHI_ARG_DEF_FROM_EDGE (phi, e);
+ add_phi_arg (phi_copy, def, e_copy);
+ }
+}
+
/* Basic block BB_COPY was created by code duplication. Add phi node
arguments for edges going out of BB_COPY. The blocks that were
void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
- basic_block bb, dest;
- edge e, e_copy;
edge_iterator ei;
- tree phi, phi_copy, phi_next, def;
-
- bb = get_bb_original (bb_copy);
+ edge e_copy;
FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
{
- if (!phi_nodes (e_copy->dest))
- continue;
-
- if (e_copy->dest->flags & BB_DUPLICATED)
- dest = get_bb_original (e_copy->dest);
- else
- dest = e_copy->dest;
-
- e = find_edge (bb, dest);
- if (!e)
- {
- /* During loop unrolling the target of the latch edge is copied.
- In this case we are not looking for edge to dest, but to
- duplicated block whose original was dest. */
- FOR_EACH_EDGE (e, ei, bb->succs)
- if ((e->dest->flags & BB_DUPLICATED)
- && get_bb_original (e->dest) == dest)
- break;
-
- gcc_assert (e != NULL);
- }
-
- for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
- phi;
- phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
- {
- phi_next = PHI_CHAIN (phi);
- def = PHI_ARG_DEF_FROM_EDGE (phi, e);
- add_phi_arg (phi_copy, def, e_copy);
- }
+ /* The per-edge work was factored out into
+ add_phi_args_after_copy_edge. */
+ add_phi_args_after_copy_edge (e_copy);
}
}
/* Blocks in REGION_COPY array of length N_REGION were created by
duplication of basic blocks. Add phi node arguments for edges
- going from these blocks. */
- going from these blocks. */
+ going from these blocks. If E_COPY is not NULL, also add
+ phi node arguments for its destination. */
void
-add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
+add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
+ edge e_copy)
{
unsigned i;
for (i = 0; i < n_region; i++)
add_phi_args_after_copy_bb (region_copy[i]);
+ if (e_copy)
+ add_phi_args_after_copy_edge (e_copy);
for (i = 0; i < n_region; i++)
region_copy[i]->flags &= ~BB_DUPLICATED;
set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
iterate_fix_dominators (CDI_DOMINATORS, doms, false);
- free (doms);
+ VEC_free (basic_block, heap, doms);
/* Add the other PHI node arguments. */
- add_phi_args_after_copy (region_copy, n_region);
+ add_phi_args_after_copy (region_copy, n_region, NULL);
+
+ /* Update the SSA web. */
+ update_ssa (TODO_update_ssa);
+
+ if (free_region_copy)
+ free (region_copy);
+
+ free_original_copy_tables ();
+ return true;
+}
+
+/* Duplicates REGION consisting of N_REGION blocks. The new blocks
+ are stored to REGION_COPY in the same order in which they appear
+ in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
+ the region, EXIT an exit from it. The condition guarding EXIT
+ is moved to ENTRY. Returns true if duplication succeeds, false
+ otherwise.
+
+ For example,
+
+ some_code;
+ if (cond)
+ A;
+ else
+ B;
+
+ is transformed to
+
+ if (cond)
+ {
+ some_code;
+ A;
+ }
+ else
+ {
+ some_code;
+ B;
+ }
+*/
+
+bool
+tree_duplicate_sese_tail (edge entry, edge exit,
+ basic_block *region, unsigned n_region,
+ basic_block *region_copy)
+{
+ unsigned i;
+ bool free_region_copy = false;
+ struct loop *loop = exit->dest->loop_father;
+ struct loop *orig_loop = entry->dest->loop_father;
+ basic_block switch_bb, entry_bb, nentry_bb;
+ VEC (basic_block, heap) *doms;
+ int total_freq = 0, exit_freq = 0;
+ gcov_type total_count = 0, exit_count = 0;
+ edge exits[2], nexits[2], e;
+ block_stmt_iterator bsi;
+ tree cond;
+ edge sorig, snew;
+
+ gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
+ exits[0] = exit;
+ exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
+
+ if (!can_copy_bbs_p (region, n_region))
+ return false;
+
+ /* Some sanity checking. Note that we do not check for all possible
+ misuses of the functions. I.e. if you ask to copy something weird
+ (e.g., in the example, if there is a jump from inside to the middle
+ of some_code, or some_code defines some of the values used in cond)
+ it will work, but the resulting code will not be correct. */
+ for (i = 0; i < n_region; i++)
+ {
+ /* We do not handle subloops, i.e. all the blocks must belong to the
+ same loop. */
+ if (region[i]->loop_father != orig_loop)
+ return false;
+
+ if (region[i] == orig_loop->latch)
+ return false;
+ }
+
+ initialize_original_copy_tables ();
+ set_loop_copy (orig_loop, loop);
+
+ if (!region_copy)
+ {
+ region_copy = XNEWVEC (basic_block, n_region);
+ free_region_copy = true;
+ }
+
+ gcc_assert (!need_ssa_update_p ());
+
+ /* Record blocks outside the region that are dominated by something
+ inside. */
+ doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
+
+ if (exit->src->count)
+ {
+ total_count = exit->src->count;
+ exit_count = exit->count;
+ /* Fix up corner cases, to avoid division by zero or creation of negative
+ frequencies. */
+ if (exit_count > total_count)
+ exit_count = total_count;
+ }
+ else
+ {
+ total_freq = exit->src->frequency;
+ exit_freq = EDGE_FREQUENCY (exit);
+ /* Fix up corner cases, to avoid division by zero or creation of negative
+ frequencies. */
+ if (total_freq == 0)
+ total_freq = 1;
+ if (exit_freq > total_freq)
+ exit_freq = total_freq;
+ }
+
+ copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
+ split_edge_bb_loc (exit));
+ if (total_count)
+ {
+ scale_bbs_frequencies_gcov_type (region, n_region,
+ total_count - exit_count,
+ total_count);
+ scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
+ total_count);
+ }
+ else
+ {
+ scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
+ total_freq);
+ scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
+ }
+
+ /* Create the switch block, and put the exit condition to it. */
+ entry_bb = entry->dest;
+ nentry_bb = get_bb_copy (entry_bb);
+ if (!last_stmt (entry->src)
+ || !stmt_ends_bb_p (last_stmt (entry->src)))
+ switch_bb = entry->src;
+ else
+ switch_bb = split_edge (entry);
+ set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
+
+ bsi = bsi_last (switch_bb);
+ cond = last_stmt (exit->src);
+ gcc_assert (TREE_CODE (cond) == COND_EXPR);
+ bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
+
+ sorig = single_succ_edge (switch_bb);
+ sorig->flags = exits[1]->flags;
+ snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
+
+ /* Register the new edge from SWITCH_BB in loop exit lists. */
+ rescan_loop_exit (snew, true, false);
+
+ /* Add the PHI node arguments. */
+ add_phi_args_after_copy (region_copy, n_region, snew);
+
+ /* Get rid of now superfluous conditions and associated edges (and phi node
+ arguments). */
+ e = redirect_edge_and_branch (exits[0], exits[1]->dest);
+ PENDING_STMT (e) = NULL_TREE;
+ e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
+ PENDING_STMT (e) = NULL_TREE;
+
+ /* Anything that is outside of the region, but was dominated by something
+ inside needs to update dominance info. */
+ iterate_fix_dominators (CDI_DOMINATORS, doms, false);
+ VEC_free (basic_block, heap, doms);
/* Update the SSA web. */
update_ssa (TODO_update_ssa);
}
}
+/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
+ The duplicates are recorded in VARS_MAP. Only variables (SSA_VAR_P)
+ and CONST_DECLs are handled; anything else trips the assertion. */
+
+static void
+replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
+ tree to_context)
+{
+ tree t = *tp, new_t;
+ struct function *f = DECL_STRUCT_FUNCTION (to_context);
+ void **loc;
+
+ /* Already belongs to the target function -- nothing to do. */
+ if (DECL_CONTEXT (t) == to_context)
+ return;
+
+ loc = pointer_map_contains (vars_map, t);
+
+ if (!loc)
+ {
+ /* First time we see T: create the duplicate and cache it. */
+ loc = pointer_map_insert (vars_map, t);
+
+ if (SSA_VAR_P (t))
+ {
+ new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
+ f->unexpanded_var_list
+ = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
+ }
+ else
+ {
+ gcc_assert (TREE_CODE (t) == CONST_DECL);
+ new_t = copy_node (t);
+ }
+ DECL_CONTEXT (new_t) = to_context;
+
+ *loc = new_t;
+ }
+ else
+ new_t = *loc;
+
+ *tp = new_t;
+}
+
+/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
+ VARS_MAP maps old ssa names and var_decls to the new ones. */
+
+static tree
+replace_ssa_name (tree name, struct pointer_map_t *vars_map,
+ tree to_context)
+{
+ void **loc;
+ tree new_name, decl = SSA_NAME_VAR (name);
+
+ /* Only real (register) operands are expected here; virtual
+ operands are handled elsewhere. */
+ gcc_assert (is_gimple_reg (name));
+
+ loc = pointer_map_contains (vars_map, name);
+
+ if (!loc)
+ {
+ /* Duplicate the underlying variable first, then build the new
+ SSA name while cfun points at TO_CONTEXT so that
+ make_ssa_name and set_default_def act on that function. */
+ replace_by_duplicate_decl (&decl, vars_map, to_context);
+
+ push_cfun (DECL_STRUCT_FUNCTION (to_context));
+ if (gimple_in_ssa_p (cfun))
+ add_referenced_var (decl);
+
+ new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
+ if (SSA_NAME_IS_DEFAULT_DEF (name))
+ set_default_def (decl, new_name);
+ pop_cfun ();
+
+ /* Cache the mapping for later uses of NAME. */
+ loc = pointer_map_insert (vars_map, name);
+ *loc = new_name;
+ }
+ else
+ new_name = *loc;
+
+ return new_name;
+}
struct move_stmt_d
{
tree block;
tree from_context;
tree to_context;
- bitmap vars_to_remove;
+ /* Map from the original decls/SSA names to their duplicates in
+ TO_CONTEXT (see replace_by_duplicate_decl / replace_ssa_name). */
+ struct pointer_map_t *vars_map;
htab_t new_label_map;
bool remap_decls_p;
};
p->remap_decls_p = save_remap_decls_p;
}
- else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
+ else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
{
- if (TREE_CODE (t) == LABEL_DECL)
+ if (TREE_CODE (t) == SSA_NAME)
+ *tp = replace_ssa_name (t, p->vars_map, p->to_context);
+ else if (TREE_CODE (t) == LABEL_DECL)
{
if (p->new_label_map)
{
}
else if (p->remap_decls_p)
{
- DECL_CONTEXT (t) = p->to_context;
-
- if (TREE_CODE (t) == VAR_DECL)
+ /* Replace T with its duplicate. T should no longer appear in the
+ parent function, so this looks wasteful; however, it may appear
+ in referenced_vars, and more importantly, as virtual operands of
+ statements, and in alias lists of other variables. It would be
+ quite difficult to expunge it from all those places. ??? It might
+ suffice to do this for addressable variables. */
+ if ((TREE_CODE (t) == VAR_DECL
+ && !is_global_var (t))
+ || TREE_CODE (t) == CONST_DECL)
+ replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
+
+ if (SSA_VAR_P (t)
+ && gimple_in_ssa_p (cfun))
{
- struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
- f->unexpanded_var_list
- = tree_cons (0, t, f->unexpanded_var_list);
-
- /* Mark T to be removed from the original function,
- otherwise it will be given a DECL_RTL when the
- original function is expanded. */
- bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
+ push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
+ add_referenced_var (*tp);
+ pop_cfun ();
}
}
+ *walk_subtrees = 0;
}
else if (TYPE_P (t))
*walk_subtrees = 0;
return NULL_TREE;
}
+/* Marks virtual operands of all statements in basic blocks BBS for
+ renaming. */
+
+static void
+mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
+{
+ tree phi;
+ block_stmt_iterator bsi;
+ basic_block bb;
+ unsigned i;
+
+ for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
+ {
+ /* PHI nodes are kept separately from the statement list, so
+ walk both. */
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ mark_virtual_ops_for_renaming (phi);
+
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ mark_virtual_ops_for_renaming (bsi_stmt (bsi));
+ }
+}
/* Move basic block BB from function CFUN to function DEST_FN. The
block is moved out of the original linked list and placed after
If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
updated to reflect the moved edges.
- On exit, local variables that need to be removed from
- CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
+ The local variables are remapped to new instances, VARS_MAP is used
+ to record the mapping. */
static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
basic_block after, bool update_edge_count_p,
- bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
+ struct pointer_map_t *vars_map, htab_t new_label_map,
+ int eh_offset)
{
struct control_flow_graph *cfg;
edge_iterator ei;
block_stmt_iterator si;
struct move_stmt_d d;
unsigned old_len, new_len;
+ tree phi, next_phi;
/* Remove BB from dominance structures. */
delete_from_dominance_info (CDI_DOMINATORS, bb);
+ if (current_loops)
+ remove_bb_from_loops (bb);
/* Link BB to the new linked list. */
move_block_after (bb, after);
VEC_replace (basic_block, cfg->x_basic_block_info,
bb->index, bb);
+ /* Remap the variables in phi nodes. */
+ for (phi = phi_nodes (bb); phi; phi = next_phi)
+ {
+ use_operand_p use;
+ tree op = PHI_RESULT (phi);
+ ssa_op_iter oi;
+
+ next_phi = PHI_CHAIN (phi);
+ if (!is_gimple_reg (op))
+ {
+ /* Remove the phi nodes for virtual operands (alias analysis will be
+ run for the new function, anyway). */
+ remove_phi_node (phi, NULL, true);
+ continue;
+ }
+
+ SET_PHI_RESULT (phi, replace_ssa_name (op, vars_map, dest_cfun->decl));
+ FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
+ {
+ op = USE_FROM_PTR (use);
+ if (TREE_CODE (op) == SSA_NAME)
+ SET_USE (use, replace_ssa_name (op, vars_map, dest_cfun->decl));
+ }
+ }
+
/* The statements in BB need to be associated with a new TREE_BLOCK.
Labels need to be associated with a new label-to-block map. */
memset (&d, 0, sizeof (d));
- d.vars_to_remove = vars_to_remove;
+ d.vars_map = vars_map;
+ d.from_context = cfun->decl;
+ d.to_context = dest_cfun->decl;
+ d.new_label_map = new_label_map;
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
tree stmt = bsi_stmt (si);
int region;
- d.from_context = cfun->decl;
- d.to_context = dest_cfun->decl;
d.remap_decls_p = true;
- d.new_label_map = new_label_map;
if (TREE_BLOCK (stmt))
d.block = DECL_INITIAL (dest_cfun->decl);
gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
gimple_remove_stmt_histograms (cfun, stmt);
}
+
+ /* We cannot leave any operands allocated from the operand caches of
+ the current function. */
+ free_stmt_operands (stmt);
+ push_cfun (dest_cfun);
+ update_stmt (stmt);
+ pop_cfun ();
}
}
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
basic_block exit_bb)
{
- VEC(basic_block,heap) *bbs;
- basic_block after, bb, *entry_pred, *exit_succ;
- struct function *saved_cfun;
+ VEC(basic_block,heap) *bbs, *dom_bbs;
+ basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
+ basic_block after, bb, *entry_pred, *exit_succ, abb;
+ struct function *saved_cfun = cfun;
int *entry_flag, *exit_flag, eh_offset;
+ unsigned *entry_prob, *exit_prob;
unsigned i, num_entry_edges, num_exit_edges;
edge e;
edge_iterator ei;
- bitmap vars_to_remove;
htab_t new_label_map;
-
- saved_cfun = cfun;
-
- /* Collect all the blocks in the region. Manually add ENTRY_BB
- because it won't be added by dfs_enumerate_from. */
- calculate_dominance_info (CDI_DOMINATORS);
+ struct pointer_map_t *vars_map;
+ struct loop *loop = entry_bb->loop_father;
/* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
region. */
&& (!exit_bb
|| dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
+ /* Collect all the blocks in the region. Manually add ENTRY_BB
+ because it won't be added by dfs_enumerate_from. */
bbs = NULL;
VEC_safe_push (basic_block, heap, bbs, entry_bb);
gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
+ /* The blocks that used to be dominated by something in BBS will now be
+ dominated by the new block. */
+ dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
+ VEC_address (basic_block, bbs),
+ VEC_length (basic_block, bbs));
+
/* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
the predecessor edges to ENTRY_BB and the successor edges to
EXIT_BB so that we can re-attach them to the new basic block that
num_entry_edges = EDGE_COUNT (entry_bb->preds);
entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
+ entry_prob = XNEWVEC (unsigned, num_entry_edges);
i = 0;
for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
{
+ entry_prob[i] = e->probability;
entry_flag[i] = e->flags;
entry_pred[i++] = e->src;
remove_edge (e);
exit_succ = (basic_block *) xcalloc (num_exit_edges,
sizeof (basic_block));
exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
+ exit_prob = XNEWVEC (unsigned, num_exit_edges);
i = 0;
for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
{
+ exit_prob[i] = e->probability;
exit_flag[i] = e->flags;
exit_succ[i++] = e->dest;
remove_edge (e);
num_exit_edges = 0;
exit_succ = NULL;
exit_flag = NULL;
+ exit_prob = NULL;
}
/* Switch context to the child function to initialize DEST_FN's CFG. */
gcc_assert (dest_cfun->cfg == NULL);
- cfun = dest_cfun;
+ push_cfun (dest_cfun);
init_empty_tree_cfg ();
}
}
- cfun = saved_cfun;
+ pop_cfun ();
+
+ /* The ssa form for virtual operands in the source function will have to
+ be repaired. We do not care for the real operands -- the sese region
+ must be closed with respect to those. */
+ mark_virtual_ops_in_region (bbs);
/* Move blocks from BBS into DEST_CFUN. */
gcc_assert (VEC_length (basic_block, bbs) >= 2);
after = dest_cfun->cfg->x_entry_block_ptr;
- vars_to_remove = BITMAP_ALLOC (NULL);
+ vars_map = pointer_map_create ();
for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
{
/* No need to update edge counts on the last block. It has
already been updated earlier when we detached the region from
the original CFG. */
- move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
+ move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
new_label_map, eh_offset);
after = bb;
}
if (new_label_map)
htab_delete (new_label_map);
-
- /* Remove the variables marked in VARS_TO_REMOVE from
- CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
- DECL_RTL in the context of CFUN. */
- if (!bitmap_empty_p (vars_to_remove))
- {
- tree *p;
-
- for (p = &cfun->unexpanded_var_list; *p; )
- {
- tree var = TREE_VALUE (*p);
- if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
- {
- *p = TREE_CHAIN (*p);
- continue;
- }
-
- p = &TREE_CHAIN (*p);
- }
- }
-
- BITMAP_FREE (vars_to_remove);
+ pointer_map_destroy (vars_map);
/* Rewire the entry and exit blocks. The successor to the entry
block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
FIXME, this is silly. The CFG ought to become a parameter to
these helpers. */
- cfun = dest_cfun;
+ push_cfun (dest_cfun);
make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
if (exit_bb)
make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
- cfun = saved_cfun;
+ pop_cfun ();
/* Back in the original function, the SESE region has disappeared,
create a new basic block in its place. */
bb = create_empty_bb (entry_pred[0]);
+ if (current_loops)
+ add_bb_to_loop (bb, loop);
for (i = 0; i < num_entry_edges; i++)
- make_edge (entry_pred[i], bb, entry_flag[i]);
+ {
+ e = make_edge (entry_pred[i], bb, entry_flag[i]);
+ e->probability = entry_prob[i];
+ }
for (i = 0; i < num_exit_edges; i++)
- make_edge (bb, exit_succ[i], exit_flag[i]);
+ {
+ e = make_edge (bb, exit_succ[i], exit_flag[i]);
+ e->probability = exit_prob[i];
+ }
+
+ set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
+ for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
+ set_immediate_dominator (CDI_DOMINATORS, abb, bb);
+ VEC_free (basic_block, heap, dom_bbs);
if (exit_bb)
{
+ free (exit_prob);
free (exit_flag);
free (exit_succ);
}
+ free (entry_prob);
free (entry_flag);
free (entry_pred);
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
VEC_free (basic_block, heap, bbs);
return bb;
bool ignore_topmost_bind = false, any_var = false;
basic_block bb;
tree chain;
- struct function *saved_cfun;
fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
}
/* Switch CFUN to point to FN. */
- saved_cfun = cfun;
- cfun = DECL_STRUCT_FUNCTION (fn);
+ push_cfun (DECL_STRUCT_FUNCTION (fn));
/* When GIMPLE is lowered, the variables are no longer available in
BIND_EXPRs, so display them separately. */
fprintf (file, "\n\n");
/* Restore CFUN. */
- cfun = saved_cfun;
+ pop_cfun ();
}
otherwise. */
static bool
-tree_block_ends_with_condjump_p (basic_block bb)
+tree_block_ends_with_condjump_p (const_basic_block bb)
{
- tree stmt = last_stmt (bb);
+ /* This CONST_CAST is okay because last_stmt doesn't modify its
+ argument and the return value is not modified. */
+ const_tree stmt = last_stmt (CONST_CAST_BB (bb));
return (stmt && TREE_CODE (stmt) == COND_EXPR);
}
}
bool
-tree_purge_all_dead_eh_edges (bitmap blocks)
+tree_purge_all_dead_eh_edges (const_bitmap blocks)
{
bool changed = false;
unsigned i;