/* Miscellaneous SSA utility functions.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
This file is part of GCC.
#include "tm.h"
#include "tree.h"
#include "flags.h"
-#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "ggc.h"
#include "langhooks.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
-#include "expr.h"
#include "function.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-inline.h"
-#include "varray.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
+#include "cfgloop.h"
/* Pointer map of variable mappings, keyed by edge. */
static struct pointer_map_t *edge_var_maps;
return NULL_TREE;
if (!is_gimple_reg (var))
- return NULL_TREE;
+ {
+ if (is_gimple_reg_type (TREE_TYPE (var))
+ && referenced_var_lookup (cfun, DECL_UID (var)) == NULL_TREE)
+ return var;
+ return NULL_TREE;
+ }
return var;
}
{
struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
- if (wi->is_lhs)
+ if (wi && wi->is_lhs)
return NULL_TREE;
if (TREE_CODE (*tp) == SSA_NAME)
if (!MAY_HAVE_DEBUG_STMTS)
return;
- /* First of all, check whether there are debug stmts that reference
- this variable and, if there are, decide whether we should use a
- debug temp. */
+ /* If this name has already been registered for replacement, do nothing
+ as anything that uses this name isn't in SSA form. */
+ if (name_registered_for_update_p (var))
+ return;
+
+ /* Check whether there are debug stmts that reference this variable and,
+ if there are, decide whether we should use a debug temp. */
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
{
stmt = USE_STMT (use_p);
/* If we didn't get an insertion point, and the stmt has already
been removed, we won't be able to insert the debug bind stmt, so
we'll have to drop debug information. */
- if (is_gimple_assign (def_stmt))
+ if (gimple_code (def_stmt) == GIMPLE_PHI)
+ {
+ value = degenerate_phi_result (def_stmt);
+ if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
+ value = NULL;
+ /* error_mark_node is what fixup_noreturn_call changes PHI arguments
+ to. */
+ else if (value == error_mark_node)
+ value = NULL;
+ }
+ else if (is_gimple_assign (def_stmt))
{
bool no_value = false;
dead SSA NAMEs. SSA verification shall catch any
errors. */
if ((!gsi && !gimple_bb (def_stmt))
- || !walk_gimple_op (def_stmt, find_released_ssa_name,
- &wi))
+ || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
no_value = true;
}
at the expense of duplication of expressions. */
if (CONSTANT_CLASS_P (value)
+ || gimple_code (def_stmt) == GIMPLE_PHI
|| (usecount == 1
&& (!gimple_assign_single_p (def_stmt)
|| is_gimple_min_invariant (value)))
continue;
if (value)
- FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
- /* unshare_expr is not needed here. vexpr is either a
- SINGLE_RHS, that can be safely shared, some other RHS
- that was unshared when we found it had a single debug
- use, or a DEBUG_EXPR_DECL, that can be safely
- shared. */
- SET_USE (use_p, value);
+ {
+ FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+ /* unshare_expr is not needed here. vexpr is either a
+ SINGLE_RHS, that can be safely shared, some other RHS
+ that was unshared when we found it had a single debug
+ use, or a DEBUG_EXPR_DECL, that can be safely
+ shared. */
+ SET_USE (use_p, value);
+ /* If we didn't replace uses with a debug decl fold the
+ resulting expression. Otherwise we end up with invalid IL. */
+ if (TREE_CODE (value) != DEBUG_EXPR_DECL)
+ {
+ gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
+ fold_stmt_inplace (&gsi);
+ }
+ }
else
gimple_debug_bind_reset_value (stmt);
stmt = gsi_stmt (*gsi);
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter, SSA_OP_DEF)
+ FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
{
tree var = DEF_FROM_PTR (def_p);
}
}
+/* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
+
+void
+reset_debug_uses (gimple stmt)
+{
+ ssa_op_iter op_iter;
+ def_operand_p def_p;
+ imm_use_iterator imm_iter;
+ gimple use_stmt;
+
+ /* Without debug info generation no debug bind stmts exist, so there
+   is nothing to reset.  */
+ if (!MAY_HAVE_DEBUG_STMTS)
+ return;
+
+ /* Visit every SSA def of STMT, including PHI results.  */
+ FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
+ {
+ tree var = DEF_FROM_PTR (def_p);
+
+ if (TREE_CODE (var) != SSA_NAME)
+ continue;
+
+ /* Drop the bound value of each debug bind stmt using this def;
+   the bind stmt itself is kept with no value, so the debugger
+   sees the variable as unavailable instead of stale.  */
+ FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
+ {
+ if (!gimple_debug_bind_p (use_stmt))
+ continue;
+
+ gimple_debug_bind_reset_value (use_stmt);
+ update_stmt (use_stmt);
+ }
+ }
+}
+
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
dominated stmts before their dominators, so that release_ssa_defs
stands a chance of propagating DEFs into debug bind stmts. */
if (verify_ssa_name (ssa_name, is_virtual))
goto err;
+ if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
+ && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
+ {
+ error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
+ goto err;
+ }
+
if (definition_block[SSA_NAME_VERSION (ssa_name)])
{
error ("SSA_NAME created in two different blocks %i and %i",
err = true;
}
- /* Make sure the use is in an appropriate list by checking the previous
+ /* Make sure the use is in an appropriate list by checking the previous
element to make sure it's the same. */
if (use_p->prev == NULL)
{
/* Verify common invariants in the SSA web.
TODO: verify the variable annotations. */
-void
+DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt)
{
size_t i;
gcc_assert (!need_ssa_update_p (cfun));
- verify_stmts ();
-
timevar_push (TV_TREE_SSA_VERIFY);
/* Keep track of SSA names present in the IL. */
gimple stmt;
TREE_VISITED (name) = 0;
+ verify_ssa_name (name, !is_gimple_reg (name));
+
stmt = SSA_NAME_DEF_STMT (name);
if (!gimple_nop_p (stmt))
{
{
gimple stmt = gsi_stmt (gsi);
use_operand_p use_p;
- bool has_err;
if (check_modified_stmt && gimple_modified_p (stmt))
{
goto err;
}
- if (is_gimple_assign (stmt)
- && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
+ if (verify_ssa_operands (stmt))
{
- tree lhs, base_address;
-
- lhs = gimple_assign_lhs (stmt);
- base_address = get_base_address (lhs);
-
- if (base_address
- && SSA_VAR_P (base_address)
- && !gimple_vdef (stmt)
- && optimize > 0)
- {
- error ("statement makes a memory store, but has no VDEFS");
- print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
- goto err;
- }
- }
- else if (gimple_debug_bind_p (stmt)
- && !gimple_debug_bind_has_value_p (stmt))
- continue;
-
- /* Verify the single virtual operand and its constraints. */
- has_err = false;
- if (gimple_vdef (stmt))
- {
- if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
- {
- error ("statement has VDEF operand not in defs list");
- has_err = true;
- }
- if (!gimple_vuse (stmt))
- {
- error ("statement has VDEF but no VUSE operand");
- has_err = true;
- }
- else if (SSA_NAME_VAR (gimple_vdef (stmt))
- != SSA_NAME_VAR (gimple_vuse (stmt)))
- {
- error ("VDEF and VUSE do not use the same symbol");
- has_err = true;
- }
- has_err |= verify_ssa_name (gimple_vdef (stmt), true);
- }
- if (gimple_vuse (stmt))
- {
- if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
- {
- error ("statement has VUSE operand not in uses list");
- has_err = true;
- }
- has_err |= verify_ssa_name (gimple_vuse (stmt), true);
- }
- if (has_err)
- {
- error ("in statement");
- print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+ print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
goto err;
}
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
- {
- if (verify_ssa_name (op, false))
- {
- error ("in statement");
- print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
- goto err;
- }
- }
+ if (gimple_debug_bind_p (stmt)
+ && !gimple_debug_bind_has_value_p (stmt))
+ continue;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
{
free_dominance_info (CDI_DOMINATORS);
else
set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
-
+
BITMAP_FREE (names_defined_in_bb);
timevar_pop (TV_TREE_SSA_VERIFY);
return;
void
init_tree_ssa (struct function *fn)
{
- fn->gimple_df = GGC_CNEW (struct gimple_df);
- fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
+ fn->gimple_df = ggc_alloc_cleared_gimple_df ();
+ fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
uid_decl_map_eq, NULL);
- fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
+ fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
uid_ssaname_map_eq, NULL);
pt_solution_reset (&fn->gimple_df->escaped);
- pt_solution_reset (&fn->gimple_df->callused);
init_ssanames (fn, 0);
- init_phinodes ();
}
+/* Do the actions required to initialize internal data structures used
+   in tree-ssa optimization passes. */
+
+static unsigned int
+execute_init_datastructures (void)
+{
+ /* Allocate hash tables, arrays and other structures. */
+ init_tree_ssa (cfun);
+ return 0;
+}
+
+/* Pass descriptor for the data-structure initialization pass.
+   NOTE(review): the leading '*' in the name conventionally marks a
+   pass that is not dumped/listed — confirm against passes.c.  */
+struct gimple_opt_pass pass_init_datastructures =
+{
+ {
+ GIMPLE_PASS,
+ "*init_datastructures", /* name */
+ NULL, /* gate */
+ execute_init_datastructures, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0 /* todo_flags_finish */
+ }
+};
/* Deallocate memory associated with SSA data structures for FNDECL. */
tree var;
/* Remove annotations from every referenced local variable. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
+ FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
{
if (is_global_var (var))
continue;
- if (var->base.ann)
- ggc_free (var->base.ann);
- var->base.ann = NULL;
+ if (var_ann (var))
+ {
+ ggc_free (var_ann (var));
+ *DECL_VAR_ANN_PTR (var) = NULL;
+ }
}
htab_delete (gimple_referenced_vars (cfun));
cfun->gimple_df->referenced_vars = NULL;
fini_ssanames ();
- fini_phinodes ();
/* We no longer maintain the SSA operand cache at this point. */
if (ssa_operands_active ())
fini_ssa_operands ();
- delete_alias_heapvars ();
-
htab_delete (cfun->gimple_df->default_defs);
cfun->gimple_df->default_defs = NULL;
pt_solution_reset (&cfun->gimple_df->escaped);
- pt_solution_reset (&cfun->gimple_df->callused);
if (cfun->gimple_df->decls_to_pointers != NULL)
pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
cfun->gimple_df->decls_to_pointers = NULL;
if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
!= TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
return false;
-
- /* If the outer type is (void *) or a pointer to an incomplete
- record type or a pointer to an unprototyped function,
- then the conversion is not necessary. */
- if (VOID_TYPE_P (TREE_TYPE (outer_type))
- || (AGGREGATE_TYPE_P (TREE_TYPE (outer_type))
- && TREE_CODE (TREE_TYPE (outer_type)) != ARRAY_TYPE
- && (TREE_CODE (TREE_TYPE (outer_type))
- == TREE_CODE (TREE_TYPE (inner_type)))
- && !COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
- || ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
- || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
- && (TREE_CODE (TREE_TYPE (outer_type))
- == TREE_CODE (TREE_TYPE (inner_type)))
- && !TYPE_ARG_TYPES (TREE_TYPE (outer_type))
- && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
- TREE_TYPE (TREE_TYPE (inner_type)))))
- return true;
-
- /* Do not lose casts to restrict qualified pointers. */
- if ((TYPE_RESTRICT (outer_type)
- != TYPE_RESTRICT (inner_type))
- && TYPE_RESTRICT (outer_type))
- return false;
}
/* From now on qualifiers on value types do not matter. */
|| TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
return false;
+ /* Preserve conversions to/from BOOLEAN_TYPE if types are not
+ of precision one. */
+ if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
+ != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
+ && TYPE_PRECISION (outer_type) != 1)
+ return false;
+
/* We don't need to preserve changes in the types minimum or
maximum value in general as these do not generate code
unless the types precisions are different. */
else if (POINTER_TYPE_P (inner_type)
&& POINTER_TYPE_P (outer_type))
{
- /* Don't lose casts between pointers to volatile and non-volatile
- qualified types. Doing so would result in changing the semantics
- of later accesses. For function types the volatile qualifier
- is used to indicate noreturn functions. */
- if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
- && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
- && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
- && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
- && (TYPE_VOLATILE (TREE_TYPE (outer_type))
- != TYPE_VOLATILE (TREE_TYPE (inner_type)))
- && TYPE_VOLATILE (TREE_TYPE (outer_type)))
- return false;
-
- /* We require explicit conversions from incomplete target types. */
- if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
- && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
- return false;
-
- /* Do not lose casts between pointers that when dereferenced access
- memory with different alias sets. */
- if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
+ /* Do not lose casts to function pointer types. */
+ if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
+ || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
+ && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
+ || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
return false;
/* We do not care for const qualification of the pointed-to types
as const qualification has no semantic value to the middle-end. */
- /* Otherwise pointers/references are equivalent if their pointed
- to types are effectively the same. We can strip qualifiers
- on pointed-to types for further comparison, which is done in
- the callee. Note we have to use true compatibility here
- because addresses are subject to propagation into dereferences
- and thus might get the original type exposed which is equivalent
- to a reverse conversion. */
- return types_compatible_p (TREE_TYPE (outer_type),
- TREE_TYPE (inner_type));
+ /* Otherwise pointers/references are equivalent. */
+ return true;
}
/* Recurse for complex types. */
return false;
/* A conversion to an unprototyped argument list is ok. */
- if (!TYPE_ARG_TYPES (outer_type))
+ if (!prototype_p (outer_type))
return true;
/* If the unqualified argument types are compatible the conversion
/* Defer to the target if necessary. */
if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
- return targetm.comp_type_attributes (outer_type, inner_type) != 0;
+ return comp_type_attributes (outer_type, inner_type) != 0;
return true;
}
else if (AGGREGATE_TYPE_P (inner_type)
&& TREE_CODE (inner_type) == TREE_CODE (outer_type))
return false;
-
+
return false;
}
/* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
described in walk_use_def_chains.
-
+
VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
infinite loops. We used to have a bitmap for this to just mark
SSA versions we had visited. But non-sparse bitmaps are way too
if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
return true;
}
-
+
return false;
}
-
+
/* Walk use-def chains starting at the SSA variable VAR. Call
arguments: VAR, its defining statement (DEF_STMT) and a generic
pointer to whatever state information that FN may want to maintain
(DATA). FN is able to stop the walk by returning true, otherwise
- in order to continue the walk, FN should return false.
+ in order to continue the walk, FN should return false.
Note, that if DEF_STMT is a PHI node, the semantics are slightly
different. The first argument to FN is no longer the original
}
\f
-/* Return true if T, an SSA_NAME, has an undefined value. */
-
-bool
-ssa_undefined_value_p (tree t)
-{
- tree var = SSA_NAME_VAR (t);
-
- /* Parameters get their initial value from the function entry. */
- if (TREE_CODE (var) == PARM_DECL)
- return false;
-
- /* Hard register variables get their initial value from the ether. */
- if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
- return false;
-
- /* The value is undefined iff its definition statement is empty. */
- return gimple_nop_p (SSA_NAME_DEF_STMT (t));
-}
-
/* Emit warnings for uninitialized variables. This is done in two passes.
The first pass notices real uses of SSA names with undefined values.
again for plain uninitialized variables, since optimization may have
changed conditionally uninitialized to unconditionally uninitialized. */
-/* Emit a warning for T, an SSA_NAME, being uninitialized. The exact
- warning text is in MSGID and LOCUS may contain a location or be null. */
+/* Emit a warning for EXPR based on variable VAR at the point in the
+ program T, an SSA_NAME, is used being uninitialized. The exact
+ warning text is in MSGID and LOCUS may contain a location or be null.
+ WC is the warning code. */
-static void
-warn_uninit (tree t, const char *gmsgid, void *data)
+void
+warn_uninit (enum opt_code wc, tree t,
+ tree expr, tree var, const char *gmsgid, void *data)
{
- tree var = SSA_NAME_VAR (t);
gimple context = (gimple) data;
- location_t location;
+ location_t location, cfun_loc;
expanded_location xloc, floc;
if (!ssa_undefined_value_p (t))
/* TREE_NO_WARNING either means we already warned, or the front end
wishes to suppress the warning. */
- if (TREE_NO_WARNING (var))
+ if ((context
+ && (gimple_no_warning_p (context)
+ || (gimple_assign_single_p (context)
+ && TREE_NO_WARNING (gimple_assign_rhs1 (context)))))
+ || TREE_NO_WARNING (expr))
return;
- /* Do not warn if it can be initialized outside this module. */
- if (is_global_var (var))
- return;
-
location = (context != NULL && gimple_has_location (context))
? gimple_location (context)
: DECL_SOURCE_LOCATION (var);
+ location = linemap_resolve_location (line_table, location,
+ LRK_SPELLING_LOCATION,
+ NULL);
+ cfun_loc = DECL_SOURCE_LOCATION (cfun->decl);
xloc = expand_location (location);
- floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
- if (warning_at (location, OPT_Wuninitialized, gmsgid, var))
+ floc = expand_location (cfun_loc);
+ if (warning_at (location, wc, gmsgid, expr))
{
- TREE_NO_WARNING (var) = 1;
+ TREE_NO_WARNING (expr) = 1;
+ if (location == DECL_SOURCE_LOCATION (var))
+ return;
if (xloc.file != floc.file
- || xloc.line < floc.line
- || xloc.line > LOCATION_LINE (cfun->function_end_locus))
+ || linemap_location_before_p (line_table,
+ location, cfun_loc)
+ || linemap_location_before_p (line_table,
+ cfun->function_end_locus,
+ location))
inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
}
}
-struct walk_data {
- gimple stmt;
- bool always_executed;
- bool warn_possibly_uninitialized;
-};
-
-/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
- and warn about them. */
-
-static tree
-warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
-{
- struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
- struct walk_data *data = (struct walk_data *) wi->info;
- tree t = *tp;
-
- /* We do not care about LHS. */
- if (wi->is_lhs)
- {
- /* Except for operands of INDIRECT_REF. */
- if (!INDIRECT_REF_P (t))
- return NULL_TREE;
- t = TREE_OPERAND (t, 0);
- }
-
- switch (TREE_CODE (t))
- {
- case ADDR_EXPR:
- /* Taking the address of an uninitialized variable does not
- count as using it. */
- *walk_subtrees = 0;
- break;
-
- case VAR_DECL:
- {
- /* A VAR_DECL in the RHS of a gimple statement may mean that
- this variable is loaded from memory. */
- use_operand_p vuse;
- tree op;
-
- /* If there is not gimple stmt,
- or alias information has not been computed,
- then we cannot check VUSE ops. */
- if (data->stmt == NULL)
- return NULL_TREE;
-
- /* If the load happens as part of a call do not warn about it. */
- if (is_gimple_call (data->stmt))
- return NULL_TREE;
-
- vuse = gimple_vuse_op (data->stmt);
- if (vuse == NULL_USE_OPERAND_P)
- return NULL_TREE;
-
- op = USE_FROM_PTR (vuse);
- if (t != SSA_NAME_VAR (op)
- || !SSA_NAME_IS_DEFAULT_DEF (op))
- return NULL_TREE;
- /* If this is a VUSE of t and it is the default definition,
- then warn about op. */
- t = op;
- /* Fall through into SSA_NAME. */
- }
-
- case SSA_NAME:
- /* We only do data flow with SSA_NAMEs, so that's all we
- can warn about. */
- if (data->always_executed)
- warn_uninit (t, "%qD is used uninitialized in this function",
- data->stmt);
- else if (data->warn_possibly_uninitialized)
- warn_uninit (t, "%qD may be used uninitialized in this function",
- data->stmt);
- *walk_subtrees = 0;
- break;
-
- case REALPART_EXPR:
- case IMAGPART_EXPR:
- /* The total store transformation performed during gimplification
- creates uninitialized variable uses. If all is well, these will
- be optimized away, so don't warn now. */
- if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
- *walk_subtrees = 0;
- break;
-
- default:
- if (IS_TYPE_OR_DECL_P (t))
- *walk_subtrees = 0;
- break;
- }
-
- return NULL_TREE;
-}
-
-/* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
- and warn about them. */
-
-static void
-warn_uninitialized_phi (gimple phi)
-{
- size_t i, n = gimple_phi_num_args (phi);
-
- /* Don't look at memory tags. */
- if (!is_gimple_reg (gimple_phi_result (phi)))
- return;
-
- for (i = 0; i < n; ++i)
- {
- tree op = gimple_phi_arg_def (phi, i);
- if (TREE_CODE (op) == SSA_NAME)
- warn_uninit (op, "%qD may be used uninitialized in this function",
- NULL);
- }
-}
-
-static unsigned int
+unsigned int
warn_uninitialized_vars (bool warn_possibly_uninitialized)
{
gimple_stmt_iterator gsi;
basic_block bb;
- struct walk_data data;
-
- data.warn_possibly_uninitialized = warn_possibly_uninitialized;
-
- calculate_dominance_info (CDI_POST_DOMINATORS);
FOR_EACH_BB (bb)
{
- data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
+ bool always_executed = dominated_by_p (CDI_POST_DOMINATORS,
single_succ (ENTRY_BLOCK_PTR), bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- struct walk_stmt_info wi;
- data.stmt = gsi_stmt (gsi);
- if (is_gimple_debug (data.stmt))
+ gimple stmt = gsi_stmt (gsi);
+ use_operand_p use_p;
+ ssa_op_iter op_iter;
+ tree use;
+
+ if (is_gimple_debug (stmt))
continue;
- memset (&wi, 0, sizeof (wi));
- wi.info = &data;
- walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
+
+ /* We only do data flow with SSA_NAMEs, so that's all we
+ can warn about. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, op_iter, SSA_OP_USE)
+ {
+ use = USE_FROM_PTR (use_p);
+ if (always_executed)
+ warn_uninit (OPT_Wuninitialized, use,
+ SSA_NAME_VAR (use), SSA_NAME_VAR (use),
+ "%qD is used uninitialized in this function",
+ stmt);
+ else if (warn_possibly_uninitialized)
+ warn_uninit (OPT_Wuninitialized, use,
+ SSA_NAME_VAR (use), SSA_NAME_VAR (use),
+ "%qD may be used uninitialized in this function",
+ stmt);
+ }
+
+ /* For memory the only cheap thing we can do is see if we
+ have a use of the default def of the virtual operand.
+ ??? Note that at -O0 we do not have virtual operands.
+ ??? Not so cheap would be to use the alias oracle via
+ walk_aliased_vdefs, if we don't find any aliasing vdef
+ warn as is-used-uninitialized, if we don't find an aliasing
+ vdef that kills our use (stmt_kills_ref_p), warn as
+ may-be-used-uninitialized. But this walk is quadratic and
+ so must be limited which means we would miss warning
+ opportunities. */
+ use = gimple_vuse (stmt);
+ if (use
+ && gimple_assign_single_p (stmt)
+ && !gimple_vdef (stmt)
+ && SSA_NAME_IS_DEFAULT_DEF (use))
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+ tree base = get_base_address (rhs);
+
+ /* Do not warn if it can be initialized outside this function. */
+ if (TREE_CODE (base) != VAR_DECL
+ || DECL_HARD_REGISTER (base)
+ || is_global_var (base))
+ continue;
+
+ if (always_executed)
+ warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
+ base,
+ "%qE is used uninitialized in this function",
+ stmt);
+ else if (warn_possibly_uninitialized)
+ warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
+ base,
+ "%qE may be used uninitialized in this function",
+ stmt);
+ }
}
}
- /* Post-dominator information can not be reliably updated. Free it
- after the use. */
-
- free_dominance_info (CDI_POST_DOMINATORS);
return 0;
}
as possible, thus don't do it here. However, without
optimization we need to warn here about "may be uninitialized".
*/
- warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
- return 0;
-}
-
-static unsigned int
-execute_late_warn_uninitialized (void)
-{
- basic_block bb;
- gimple_stmt_iterator gsi;
+ calculate_dominance_info (CDI_POST_DOMINATORS);
- /* Re-do the plain uninitialized variable check, as optimization may have
- straightened control flow. Do this first so that we don't accidentally
- get a "may be" warning when we'd have seen an "is" warning later. */
- warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
+ warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
- FOR_EACH_BB (bb)
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- warn_uninitialized_phi (gsi_stmt (gsi));
+ /* Post-dominator information can not be reliably updated. Free it
+ after the use. */
+ free_dominance_info (CDI_POST_DOMINATORS);
return 0;
}
{
{
GIMPLE_PASS,
- NULL, /* name */
+ "*early_warn_uninitialized", /* name */
gate_warn_uninitialized, /* gate */
execute_early_warn_uninitialized, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- TV_NONE, /* tv_id */
+ TV_TREE_UNINIT, /* tv_id */
PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
}
};
-struct gimple_opt_pass pass_late_warn_uninitialized =
+
+/* If necessary, rewrite the base of the reference tree *TP from
+   a MEM_REF to a plain or converted symbol. */
+
+static void
+maybe_rewrite_mem_ref_base (tree *tp)
{
- {
- GIMPLE_PASS,
- NULL, /* name */
- gate_warn_uninitialized, /* gate */
- execute_late_warn_uninitialized, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_NONE, /* tv_id */
- PROP_ssa, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0 /* todo_flags_finish */
- }
-};
+ tree sym;
+
+ /* Strip handled components to reach the innermost base of *TP.  */
+ while (handled_component_p (*tp))
+ tp = &TREE_OPERAND (*tp, 0);
+ /* Only rewrite MEM_REFs of the address of a non-addressable decl
+   that is queued for SSA renaming.  */
+ if (TREE_CODE (*tp) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
+ && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
+ && DECL_P (sym)
+ && !TREE_ADDRESSABLE (sym)
+ && symbol_marked_for_renaming (sym))
+ {
+ /* An element-aligned access into a vector decl becomes a
+   BIT_FIELD_REF; the bit position is the byte offset scaled
+   by BITS_PER_UNIT.  */
+ if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
+ && useless_type_conversion_p (TREE_TYPE (*tp),
+ TREE_TYPE (TREE_TYPE (sym)))
+ && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
+ TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
+ {
+ *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
+ TYPE_SIZE (TREE_TYPE (*tp)),
+ int_const_binop (MULT_EXPR,
+ bitsize_int (BITS_PER_UNIT),
+ TREE_OPERAND (*tp, 1)));
+ }
+ /* An access to one part of a complex decl becomes a REALPART_EXPR
+   (offset zero) or IMAGPART_EXPR (non-zero offset).  */
+ else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
+ && useless_type_conversion_p (TREE_TYPE (*tp),
+ TREE_TYPE (TREE_TYPE (sym))))
+ {
+ *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
+ ? REALPART_EXPR : IMAGPART_EXPR,
+ TREE_TYPE (*tp), sym);
+ }
+ /* A zero-offset access to the whole decl becomes the decl itself,
+   wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
+ else if (integer_zerop (TREE_OPERAND (*tp, 1)))
+ {
+ if (!useless_type_conversion_p (TREE_TYPE (*tp),
+ TREE_TYPE (sym)))
+ *tp = build1 (VIEW_CONVERT_EXPR,
+ TREE_TYPE (*tp), sym);
+ else
+ *tp = sym;
+ }
+ }
+}
+
+/* For a tree REF return its base if it is the base of a MEM_REF
+   that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
+
+static tree
+non_rewritable_mem_ref_base (tree ref)
+{
+ tree base = ref;
+
+ /* A plain decl does not need it set. */
+ if (DECL_P (ref))
+ return NULL_TREE;
+
+ /* Strip handled components to reach the innermost base.  */
+ while (handled_component_p (base))
+ base = TREE_OPERAND (base, 0);
+
+ /* But watch out for MEM_REFs we cannot lower to a
+   VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
+ if (TREE_CODE (base) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
+ {
+ tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
+ /* An in-bounds, element-aligned access into a vector or complex
+   decl is rewritable (mirrors maybe_rewrite_mem_ref_base).  */
+ if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
+ || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
+ && useless_type_conversion_p (TREE_TYPE (base),
+ TREE_TYPE (TREE_TYPE (decl)))
+ && double_int_fits_in_uhwi_p (mem_ref_offset (base))
+ && double_int_ucmp
+ (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
+ mem_ref_offset (base)) == 1
+ && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
+ TYPE_SIZE_UNIT (TREE_TYPE (base))))
+ return NULL_TREE;
+ /* Otherwise a non-zero offset, a size-changing view, or a
+   volatility mismatch makes the access non-rewritable.  */
+ if (DECL_P (decl)
+ && (!integer_zerop (TREE_OPERAND (base, 1))
+ || (DECL_SIZE (decl)
+ != TYPE_SIZE (TREE_TYPE (base)))
+ || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
+ return decl;
+ }
+
+ return NULL_TREE;
+}
+
+/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
+   Otherwise return false. */
+
+static bool
+non_rewritable_lvalue_p (tree lhs)
+{
+ /* A plain decl is always rewritable. */
+ if (DECL_P (lhs))
+ return false;
+
+ /* A decl that is wrapped inside a MEM-REF that covers
+   it fully is also rewritable.
+   ??? The following could be relaxed allowing component
+   references that do not change the access size. */
+ if (TREE_CODE (lhs) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
+ && integer_zerop (TREE_OPERAND (lhs, 1)))
+ {
+ tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
+ /* Same-size, volatility-matching full cover of the decl.  */
+ if (DECL_P (decl)
+ && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
+ && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
+ return false;
+ }
+
+ return true;
+}
+
+/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
+   mark the variable VAR for conversion into SSA. Return true when updating
+   stmts is required. */
+
+static bool
+maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs)
+{
+ bool update_vops = false;
+
+ /* Global Variables, result decls cannot be changed. */
+ if (is_global_var (var)
+ || TREE_CODE (var) == RESULT_DECL
+ || bitmap_bit_p (addresses_taken, DECL_UID (var)))
+ return false;
+
+ /* If the variable is not in the list of referenced vars then we
+   do not need to touch it nor can we rename it. */
+ if (!referenced_var_lookup (cfun, DECL_UID (var)))
+ return false;
+
+ if (TREE_ADDRESSABLE (var)
+ /* Do not change TREE_ADDRESSABLE if we need to preserve var as
+   a non-register. Otherwise we are confused and forget to
+   add virtual operands for it. */
+ && (!is_gimple_reg_type (TREE_TYPE (var))
+ || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
+ || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
+ || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
+ {
+ /* The address is no longer taken anywhere; the variable can be
+   renamed into SSA and stmt virtual operands need updating.  */
+ TREE_ADDRESSABLE (var) = 0;
+ if (is_gimple_reg (var))
+ mark_sym_for_renaming (var);
+ update_vops = true;
+ if (dump_file)
+ {
+ fprintf (dump_file, "No longer having address taken: ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
+ }
+
+ /* A non-volatile, non-hard-register complex or vector variable with
+   no remaining non-register uses can be promoted to a GIMPLE
+   register and renamed into SSA.  */
+ if (!DECL_GIMPLE_REG_P (var)
+ && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
+ && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
+ && !TREE_THIS_VOLATILE (var)
+ && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
+ {
+ DECL_GIMPLE_REG_P (var) = 1;
+ mark_sym_for_renaming (var);
+ update_vops = true;
+ if (dump_file)
+ {
+ fprintf (dump_file, "Now a gimple register: ");
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
+ }
+
+ return update_vops;
+}
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
void
-execute_update_addresses_taken (bool do_optimize)
+execute_update_addresses_taken (void)
{
- tree var;
- referenced_var_iterator rvi;
gimple_stmt_iterator gsi;
basic_block bb;
bitmap addresses_taken = BITMAP_ALLOC (NULL);
bitmap not_reg_needs = BITMAP_ALLOC (NULL);
bool update_vops = false;
+ tree var;
+ unsigned i;
+
+ timevar_push (TV_ADDRESS_TAKEN);
/* Collect into ADDRESSES_TAKEN all variables whose address is taken within
the function body. */
{
gimple stmt = gsi_stmt (gsi);
enum gimple_code code = gimple_code (stmt);
+ tree decl;
/* Note all addresses taken by the stmt. */
gimple_ior_addresses_taken (addresses_taken, stmt);
if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
{
tree lhs = gimple_get_lhs (stmt);
-
- /* We may not rewrite TMR_SYMBOL to SSA. */
- if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
- && TMR_SYMBOL (lhs))
- bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));
-
- /* A plain decl does not need it set. */
- else if (lhs && handled_component_p (lhs))
- {
- var = get_base_address (lhs);
- if (DECL_P (var))
- bitmap_set_bit (not_reg_needs, DECL_UID (var));
+ if (lhs
+ && TREE_CODE (lhs) != SSA_NAME
+ && non_rewritable_lvalue_p (lhs))
+ {
+ decl = get_base_address (lhs);
+ if (DECL_P (decl))
+ bitmap_set_bit (not_reg_needs, DECL_UID (decl));
}
}
+
+ if (gimple_assign_single_p (stmt))
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+ if ((decl = non_rewritable_mem_ref_base (rhs)))
+ bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ }
+
+ else if (code == GIMPLE_CALL)
+ {
+ for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if ((decl = non_rewritable_mem_ref_base (arg)))
+ bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ }
+ }
+
+ else if (code == GIMPLE_ASM)
+ {
+ for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ {
+ tree link = gimple_asm_output_op (stmt, i);
+ tree lhs = TREE_VALUE (link);
+ if (TREE_CODE (lhs) != SSA_NAME)
+ {
+ decl = get_base_address (lhs);
+ if (DECL_P (decl)
+ && (non_rewritable_lvalue_p (lhs)
+ /* We cannot move required conversions from
+ the lhs to the rhs in asm statements, so
+ require that no conversion is needed. */
+ || !useless_type_conversion_p
+ (TREE_TYPE (lhs), TREE_TYPE (decl))))
+ bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ }
+ }
+ for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ {
+ tree link = gimple_asm_input_op (stmt, i);
+ if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
+ bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+ }
+ }
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
}
}
- /* When possible, clear ADDRESSABLE bit or set the REGISTER bit
- and mark variable for conversion into SSA. */
- if (optimize && do_optimize)
- FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- /* Global Variables, result decls cannot be changed. */
- if (is_global_var (var)
- || TREE_CODE (var) == RESULT_DECL
- || bitmap_bit_p (addresses_taken, DECL_UID (var)))
- continue;
-
- if (TREE_ADDRESSABLE (var)
- /* Do not change TREE_ADDRESSABLE if we need to preserve var as
- a non-register. Otherwise we are confused and forget to
- add virtual operands for it. */
- && (!is_gimple_reg_type (TREE_TYPE (var))
- || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
+ /* We cannot iterate over all referenced vars because that list can
+ contain unused vars from BLOCK trees, which causes code generation
+ differences for -g vs. -g0. */
+ for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
+ update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
+
+ FOR_EACH_VEC_ELT (tree, cfun->local_decls, i, var)
+ update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
+
+ /* Operand caches need to be recomputed for operands referencing the updated
+ variables. */
+ if (update_vops)
+ {
+ FOR_EACH_BB (bb)
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- TREE_ADDRESSABLE (var) = 0;
- if (is_gimple_reg (var))
- mark_sym_for_renaming (var);
- update_vops = true;
- if (dump_file)
+ gimple stmt = gsi_stmt (gsi);
+
+ /* Re-write TARGET_MEM_REFs of symbols we want to
+ rewrite into SSA form. */
+ if (gimple_assign_single_p (stmt))
{
- fprintf (dump_file, "No longer having address taken ");
- print_generic_expr (dump_file, var, 0);
- fprintf (dump_file, "\n");
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
+ tree sym;
+
+ /* We shouldn't have any fancy wrapping of
+ component-refs on the LHS, but look through
+ VIEW_CONVERT_EXPRs as that is easy. */
+ while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
+ lhs = TREE_OPERAND (lhs, 0);
+ if (TREE_CODE (lhs) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
+ && integer_zerop (TREE_OPERAND (lhs, 1))
+ && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
+ && DECL_P (sym)
+ && !TREE_ADDRESSABLE (sym)
+ && symbol_marked_for_renaming (sym))
+ lhs = sym;
+ else
+ lhs = gimple_assign_lhs (stmt);
+
+ /* Rewrite the RHS and make sure the resulting assignment
+ is validly typed. */
+ maybe_rewrite_mem_ref_base (rhsp);
+ rhs = gimple_assign_rhs1 (stmt);
+ if (gimple_assign_lhs (stmt) != lhs
+ && !useless_type_conversion_p (TREE_TYPE (lhs),
+ TREE_TYPE (rhs)))
+ rhs = fold_build1 (VIEW_CONVERT_EXPR,
+ TREE_TYPE (lhs), rhs);
+
+ if (gimple_assign_lhs (stmt) != lhs)
+ gimple_assign_set_lhs (stmt, lhs);
+
+ /* For var ={v} {CLOBBER}; where var lost
+ TREE_ADDRESSABLE, just remove the stmt. */
+ if (DECL_P (lhs)
+ && TREE_CLOBBER_P (rhs)
+ && symbol_marked_for_renaming (lhs))
+ {
+ unlink_stmt_vdef (stmt);
+ gsi_remove (&gsi, true);
+ release_defs (stmt);
+ continue;
+ }
+
+ if (gimple_assign_rhs1 (stmt) != rhs)
+ {
+ gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
+ gimple_assign_set_rhs_from_tree (&gsi, rhs);
+ }
}
- }
- if (!DECL_GIMPLE_REG_P (var)
- && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
- && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
- || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
- && !TREE_THIS_VOLATILE (var)
- && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
- {
- DECL_GIMPLE_REG_P (var) = 1;
- mark_sym_for_renaming (var);
- update_vops = true;
- if (dump_file)
+
+ else if (gimple_code (stmt) == GIMPLE_CALL)
{
- fprintf (dump_file, "Decl is now a gimple register ");
- print_generic_expr (dump_file, var, 0);
- fprintf (dump_file, "\n");
+ unsigned i;
+ for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ {
+ tree *argp = gimple_call_arg_ptr (stmt, i);
+ maybe_rewrite_mem_ref_base (argp);
+ }
}
- }
- }
- /* Operand caches needs to be recomputed for operands referencing the updated
- variables. */
- if (update_vops)
- {
- FOR_EACH_BB (bb)
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- {
- gimple stmt = gsi_stmt (gsi);
+ else if (gimple_code (stmt) == GIMPLE_ASM)
+ {
+ unsigned i;
+ for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ {
+ tree link = gimple_asm_output_op (stmt, i);
+ maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
+ }
+ for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ {
+ tree link = gimple_asm_input_op (stmt, i);
+ maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
+ }
+ }
- if (gimple_references_memory_p (stmt)
- || is_gimple_debug (stmt))
- update_stmt (stmt);
- }
+ else if (gimple_debug_bind_p (stmt)
+ && gimple_debug_bind_has_value_p (stmt))
+ {
+ tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
+ tree decl;
+ maybe_rewrite_mem_ref_base (valuep);
+ decl = non_rewritable_mem_ref_base (*valuep);
+ if (decl && symbol_marked_for_renaming (decl))
+ gimple_debug_bind_reset_value (stmt);
+ }
+
+ if (gimple_references_memory_p (stmt)
+ || is_gimple_debug (stmt))
+ update_stmt (stmt);
+
+ gsi_next (&gsi);
+ }
/* Update SSA form here, we are called as non-pass as well. */
- update_ssa (TODO_update_ssa);
+ if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
+ rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
+ else
+ update_ssa (TODO_update_ssa);
}
BITMAP_FREE (not_reg_needs);
BITMAP_FREE (addresses_taken);
+ timevar_pop (TV_ADDRESS_TAKEN);
}
struct gimple_opt_pass pass_update_address_taken =
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- TV_NONE, /* tv_id */
+ TV_ADDRESS_TAKEN, /* tv_id */
PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_update_address_taken
- | TODO_dump_func /* todo_flags_finish */
+ TODO_update_address_taken /* todo_flags_finish */
}
};