+2015-09-19 Trevor Saunders <tbsaunde@tbsaunde.org>
+
+ * coretypes.h (gimple): Change typedef to be a forward
+ declaration.
+ * gimple.h (gimple_statement_base): Rename to gimple.
+ (all functions and types using gimple): Adjust.
+ * *.[ch]: Likewise.
+
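In practice, the change this entry describes replaces the old pointer
typedef with a bare struct declaration, so every use of a statement
spells the pointer explicitly. A minimal sketch (the real hunk appears
in the coretypes.h diff below):

    /* Before: the statement handle was a hidden pointer.  */
    typedef struct gimple_statement_base *gimple;

    /* After: gimple is the statement struct itself, forward-declared.  */
    struct gimple;
    gimple *stmt = gsi_stmt (gsi);  /* iterators now yield a gimple *  */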
2015-09-19 Andrew Dixie <andrewd@gentrack.com>
David Edelsohn <dje.gcc@gmail.com>
contains. */
static bool
-has_stmt_been_instrumented_p (gimple stmt)
+has_stmt_been_instrumented_p (gimple *stmt)
{
if (gimple_assign_single_p (stmt))
{
{
tree t, uintptr_type = TREE_TYPE (base_addr);
tree shadow_type = TREE_TYPE (shadow_ptr_type);
- gimple g;
+ gimple *g;
t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
{
if (TREE_CODE (base) == SSA_NAME)
return base;
- gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
+ gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
TREE_CODE (base), base);
gimple_set_location (g, loc);
if (before_p)
{
if (ptrofftype_p (len))
return len;
- gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, len);
gimple_set_location (g, loc);
if (before_p)
bool is_scalar_access, unsigned int align = 0)
{
gimple_stmt_iterator gsi = *iter;
- gimple g;
+ gimple *g;
gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
- gimple s = gsi_stmt (*iter);
+ gimple *s = gsi_stmt (*iter);
gcc_assert (gimple_assign_single_p (s));
static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
- gimple stmt = gsi_stmt (*iter);
+ gimple *stmt = gsi_stmt (*iter);
bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
if (is_builtin && instrument_builtin_call (iter))
}
}
tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
- gimple g = gimple_build_call (decl, 0);
+ gimple *g = gimple_build_call (decl, 0);
gimple_set_location (g, gimple_location (stmt));
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
for (i = gsi_start_bb (bb); !gsi_end_p (i);)
{
- gimple s = gsi_stmt (i);
+ gimple *s = gsi_stmt (i);
if (has_stmt_been_instrumented_p (s))
gsi_next (&i);
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
- gimple g = gsi_stmt (*iter);
+ gimple *g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
bool recover_p
if (use_calls)
{
/* Instrument using callbacks. */
- gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, base);
gimple_set_location (g, loc);
gsi_insert_before (iter, g, GSI_SAME_STMT);
& ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
shadow_ptr_type);
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
/* Aligned (>= 8 bytes) can test just
tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
shadow_ptr_type);
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
/* Set of gimple stmts. Used to track if the stmt has already been promoted
to direct call. */
-typedef std::set<gimple> stmt_set;
+typedef std::set<gimple *> stmt_set;
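Hunks like this one are forced by the same rename: with gimple now a
struct type rather than a pointer typedef, std::set<gimple> would need a
complete, copyable element type, so containers must store the pointer
explicitly. A small sketch (promoted and stmt are illustrative names):

    stmt_set promoted;                  /* i.e. std::set<gimple *>        */
    promoted.insert (gsi_stmt (gsi));   /* remember a promoted call stmt  */
    bool seen = promoted.count (stmt) != 0;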
/* Represent count info of an inline stack. */
struct count_info
/* Find count_info for a given gimple STMT. If found, store the count_info
in INFO and return true; otherwise return false. */
- bool get_count_info (gimple stmt, count_info *info) const;
+ bool get_count_info (gimple *stmt, count_info *info) const;
/* Find total count of the callee of EDGE. */
gcov_type get_callsite_total_count (struct cgraph_edge *edge) const;
of DECL. The lower 16 bits store the discriminator. */
static unsigned
-get_relative_location_for_stmt (gimple stmt)
+get_relative_location_for_stmt (gimple *stmt)
{
location_t locus = gimple_location (stmt);
if (LOCATION_LOCUS (locus) == UNKNOWN_LOCATION)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_CALL && !gimple_call_internal_p (stmt)
&& (gimple_call_fn (stmt) == NULL
|| TREE_CODE (gimple_call_fn (stmt)) != FUNCTION_DECL))
in INFO and return true; otherwise return false. */
bool
-autofdo_source_profile::get_count_info (gimple stmt, count_info *info) const
+autofdo_source_profile::get_count_info (gimple *stmt, count_info *info) const
{
if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
return false;
afdo_indirect_call (gimple_stmt_iterator *gsi, const icall_target_map &map,
bool transform)
{
- gimple gs = gsi_stmt (*gsi);
+ gimple *gs = gsi_stmt (*gsi);
tree callee;
if (map.size () == 0)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
count_info info;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
continue;
if (afdo_source_profile->get_count_info (stmt, &info))
basic_block bb;
FOR_ALL_BB_FN (bb, cfun)
{
- gimple def_stmt;
+ gimple *def_stmt;
tree cmp_rhs, cmp_lhs;
- gimple cmp_stmt = last_stmt (bb);
+ gimple *cmp_stmt = last_stmt (bb);
edge e;
edge_iterator ei;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
count_info info;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (afdo_source_profile->get_count_info (stmt, &info))
bb_count = MAX (bb_count, info.count);
}
/* Return true if STMT is an alloca call. */
bool
-gimple_alloca_call_p (const_gimple stmt)
+gimple_alloca_call_p (const gimple *stmt)
{
tree fndecl;
extern int flags_from_decl_or_type (const_tree);
extern int call_expr_flags (const_tree);
extern int setjmp_call_p (const_tree);
-extern bool gimple_alloca_call_p (const_gimple);
+extern bool gimple_alloca_call_p (const gimple *);
extern bool alloca_call_p (const_tree);
extern bool must_pass_in_stack_var_size (machine_mode, const_tree);
extern bool must_pass_in_stack_var_size_or_pad (machine_mode, const_tree);
/* Check whether G is a potential conditional compare candidate. */
static bool
-ccmp_candidate_p (gimple g)
+ccmp_candidate_p (gimple *g)
{
tree rhs = gimple_assign_rhs_to_tree (g);
tree lhs, op0, op1;
- gimple gs0, gs1;
+ gimple *gs0, *gs1;
enum tree_code tcode, tcode0, tcode1;
tcode = TREE_CODE (rhs);
PREP_SEQ returns all insns to prepare operands for compare.
GEN_SEQ returns all compare insns. */
static rtx
-expand_ccmp_next (gimple g, enum tree_code code, rtx prev,
+expand_ccmp_next (gimple *g, enum tree_code code, rtx prev,
rtx *prep_seq, rtx *gen_seq)
{
enum rtx_code rcode;
PREP_SEQ returns all insns to prepare operands.
GEN_SEQ returns all compare insns. */
static rtx
-expand_ccmp_expr_1 (gimple g, rtx *prep_seq, rtx *gen_seq)
+expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
{
tree exp = gimple_assign_rhs_to_tree (g);
enum tree_code code = TREE_CODE (exp);
- gimple gs0 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 0));
- gimple gs1 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 1));
+ gimple *gs0 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 0));
+ gimple *gs1 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 1));
rtx tmp;
enum tree_code code0 = gimple_assign_rhs_code (gs0);
enum tree_code code1 = gimple_assign_rhs_code (gs1);
Return NULL_RTX if G is not a legal candidate or expansion fails.
Otherwise return the target. */
rtx
-expand_ccmp_expr (gimple g)
+expand_ccmp_expr (gimple *g)
{
rtx_insn *last;
rtx tmp;
#ifndef GCC_CCMP_H
#define GCC_CCMP_H
-extern rtx expand_ccmp_expr (gimple);
+extern rtx expand_ccmp_expr (gimple *);
#endif /* GCC_CCMP_H */
/* This variable holds the currently expanded gimple statement for purposes
of communicating the profile info to the builtin expanders. */
-gimple currently_expanding_gimple_stmt;
+gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);
statement STMT. */
tree
-gimple_assign_rhs_to_tree (gimple stmt)
+gimple_assign_rhs_to_tree (gimple *stmt)
{
tree t;
enum gimple_rhs_class grhs_class;
enter its partition number into bitmap DATA. */
static bool
-visit_op (gimple, tree op, tree, void *data)
+visit_op (gimple *, tree op, tree, void *data)
{
bitmap active = (bitmap)data;
op = get_base_address (op);
from bitmap DATA. */
static bool
-visit_conflict (gimple, tree op, tree, void *data)
+visit_conflict (gimple *, tree op, tree, void *data)
{
bitmap active = (bitmap)data;
op = get_base_address (op);
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
}
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_clobber_p (stmt))
{
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* This assumes that calls to internal-only functions never
use a return slot. */
if (is_gimple_call (stmt)
generated for STMT should have been appended. */
static void
-maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
+maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
&& integer_onep (op1)))
&& bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
{
- gimple second = SSA_NAME_DEF_STMT (op0);
+ gimple *second = SSA_NAME_DEF_STMT (op0);
if (gimple_code (second) == GIMPLE_ASSIGN)
{
enum tree_code code2 = gimple_assign_rhs_code (second);
/* Mark all calls that can have a transaction restart. */
static void
-mark_transaction_restart_calls (gimple stmt)
+mark_transaction_restart_calls (gimple *stmt)
{
struct tm_restart_node dummy;
tm_restart_node **slot;
for (i = 0; i < gimple_call_num_args (stmt); i++)
{
tree arg = gimple_call_arg (stmt, i);
- gimple def;
+ gimple *def;
/* TER addresses into arguments of builtin functions so we have a
chance to infer more correct alignment information. See PR39954. */
if (builtin_p
is no tailcalls and no GIMPLE_COND. */
static void
-expand_gimple_stmt_1 (gimple stmt)
+expand_gimple_stmt_1 (gimple *stmt)
{
tree op0;
location for diagnostics. */
static rtx_insn *
-expand_gimple_stmt (gimple stmt)
+expand_gimple_stmt (gimple *stmt)
{
location_t saved_location = input_location;
rtx_insn *last = get_last_insn ();
/* Split too deep TER chains for debug stmts using debug temporaries. */
static void
-avoid_deep_ter_for_debug (gimple stmt, int depth)
+avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
use_operand_p use_p;
ssa_op_iter iter;
tree use = USE_FROM_PTR (use_p);
if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
continue;
- gimple g = get_gimple_for_ssa_name (use);
+ gimple *g = get_gimple_for_ssa_name (use);
if (g == NULL)
continue;
if (depth > 6 && !stmt_ends_bb_p (g))
if (vexpr != NULL)
continue;
vexpr = make_node (DEBUG_EXPR_DECL);
- gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
+ gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
DECL_ARTIFICIAL (vexpr) = 1;
TREE_TYPE (vexpr) = TREE_TYPE (use);
DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
case SSA_NAME:
{
- gimple g = get_gimple_for_ssa_name (exp);
+ gimple *g = get_gimple_for_ssa_name (exp);
if (g)
{
tree t = NULL_TREE;
unsigned int i = 0, n = 0;
gimple_stmt_iterator gsi;
gimple_seq stmts;
- gimple stmt;
+ gimple *stmt;
bool swap;
tree op0, op1;
ssa_op_iter iter;
use_operand_p use_p;
- gimple def0, def1;
+ gimple *def0, *def1;
/* Compute cost of each statement using estimate_num_insns. */
stmts = bb_seq (bb);
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree use = USE_FROM_PTR (use_p);
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (use) != SSA_NAME)
continue;
def_stmt = get_gimple_for_ssa_name (use);
{
gimple_stmt_iterator gsi;
gimple_seq stmts;
- gimple stmt = NULL;
+ gimple *stmt = NULL;
rtx_note *note;
rtx_insn *last;
edge e;
{
ssa_op_iter iter;
tree op;
- gimple def;
+ gimple *def;
location_t sloc = curr_insn_location ();
instructions. Generate a debug temporary, and
replace all uses of OP in debug insns with that
temporary. */
- gimple debugstmt;
+ gimple *debugstmt;
tree value = gimple_assign_rhs_to_tree (def);
tree vexpr = make_node (DEBUG_EXPR_DECL);
rtx val;
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt))
walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
}
#ifndef GCC_CFGEXPAND_H
#define GCC_CFGEXPAND_H
-extern tree gimple_assign_rhs_to_tree (gimple);
+extern tree gimple_assign_rhs_to_tree (gimple *);
extern HOST_WIDE_INT estimated_stack_frame_size (struct cgraph_node *);
extern bool parm_in_stack_slot_p (tree);
extern rtx get_rtl_for_parm_ssa_default_def (tree var);
}
edge
-split_block (basic_block bb, gimple i)
+split_block (basic_block bb, gimple *i)
{
return split_block_1 (bb, i);
}
extern void remove_branch (edge);
extern void remove_edge (edge);
extern edge split_block (basic_block, rtx);
-extern edge split_block (basic_block, gimple);
+extern edge split_block (basic_block, gimple *);
extern edge split_block_after_labels (basic_block);
extern bool move_block_after (basic_block, basic_block);
extern void delete_basic_block (basic_block);
struct GTY ((chain_next ("%h.next"))) nb_iter_bound {
/* The statement STMT is executed at most ... */
- gimple stmt;
+ gimple *stmt;
/* ... BOUND + 1 times (BOUND must be an unsigned constant).
The + 1 is added for the following reasons:
/* Returns a hash value for X (which really is a cgraph_edge). */
hashval_t
-cgraph_edge_hasher::hash (gimple call_stmt)
+cgraph_edge_hasher::hash (gimple *call_stmt)
{
/* This is a really poor hash function, but it is what htab_hash_pointer
uses. */
/* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
inline bool
-cgraph_edge_hasher::equal (cgraph_edge *x, gimple y)
+cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
{
return x->call_stmt == y;
}
static inline void
cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
{
- gimple call = e->call_stmt;
+ gimple *call = e->call_stmt;
*e->caller->call_site_hash->find_slot_with_hash
(call, cgraph_edge_hasher::hash (call), INSERT) = e;
}
CALL_STMT. */
cgraph_edge *
-cgraph_node::get_edge (gimple call_stmt)
+cgraph_node::get_edge (gimple *call_stmt)
{
cgraph_edge *e, *e2;
int n = 0;
/* If necessary, change the function declaration in the call statement
associated with E so that it corresponds to the edge callee. */
-gimple
+gimple *
cgraph_edge::redirect_call_stmt_to_callee (void)
{
cgraph_edge *e = this;
TREE_TYPE (lhs), NULL);
var = get_or_create_ssa_default_def
(DECL_STRUCT_FUNCTION (e->caller->decl), var);
- gimple set_stmt = gimple_build_assign (lhs, var);
+ gimple *set_stmt = gimple_build_assign (lhs, var);
gsi = gsi_for_stmt (new_stmt);
gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
static void
cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
- gimple old_stmt, tree old_call,
- gimple new_stmt)
+ gimple *old_stmt, tree old_call,
+ gimple *new_stmt)
{
tree new_call = (new_stmt && is_gimple_call (new_stmt))
? gimple_call_fndecl (new_stmt) : 0;
of OLD_STMT before it was updated (updating can happen in place). */
void
-cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
+cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
+ gimple *new_stmt)
{
cgraph_node *orig = cgraph_node::get (cfun->decl);
cgraph_node *node;
/* Switch to THIS_CFUN if needed and print STMT to stderr. */
static void
-cgraph_debug_gimple_stmt (function *this_cfun, gimple stmt)
+cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
{
bool fndecl_was_null = false;
/* debug_gimple_stmt needs correct cfun */
{
if (this_cfun->cfg)
{
- hash_set<gimple> stmts;
+ hash_set<gimple *> stmts;
int i;
ipa_ref *ref = NULL;
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
stmts.add (stmt);
if (is_gimple_call (stmt))
{
return false. */
static bool
-gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
+gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
{
tree parms, p;
unsigned int i, nargs;
If we cannot verify this or there is a mismatch, return false. */
bool
-gimple_check_call_matching_types (gimple call_stmt, tree callee,
+gimple_check_call_matching_types (gimple *call_stmt, tree callee,
bool args_count_match)
{
tree lhs;
REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
of the use and STMT the statement (if it exists). */
ipa_ref *create_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type, gimple stmt);
+ enum ipa_ref_use use_type, gimple *stmt);
/* If VAL is a reference to a function or a variable, add a reference from
this symtab_node to the corresponding symbol table node. USE_TYPE specifies
the type of the use and STMT the statement (if it exists). Return the new
reference or NULL if none was created. */
ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
- gimple stmt);
+ gimple *stmt);
/* Clone all references from symtab NODE to this symtab_node. */
void clone_references (symtab_node *node);
void clone_referring (symtab_node *node);
/* Clone reference REF to this symtab_node and set its stmt to STMT. */
- ipa_ref *clone_reference (ipa_ref *ref, gimple stmt);
+ ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
/* Find the structure describing a reference to REFERRED_NODE
and associated with statement STMT. */
- ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
+ ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
unsigned int lto_stmt_uid);
/* Remove all references that are associated with statement STMT. */
- void remove_stmt_references (gimple stmt);
+ void remove_stmt_references (gimple *stmt);
/* Remove all stmt references in non-speculative references.
Those are not maintained during inlining & cloning.
struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
{
- typedef gimple compare_type;
+ typedef gimple *compare_type;
static hashval_t hash (cgraph_edge *);
- static hashval_t hash (gimple);
- static bool equal (cgraph_edge *, gimple);
+ static hashval_t hash (gimple *);
+ static bool equal (cgraph_edge *, gimple *);
};
/* The cgraph data structure.
/* Record all references from cgraph_node that are taken
in statement STMT. */
- void record_stmt_references (gimple stmt);
+ void record_stmt_references (gimple *stmt);
/* Like cgraph_set_call_stmt but walk the clone tree and update all
clones sharing the same function body.
When WHOLE_SPECULATIVE_EDGES is true, all three components of
the speculative edge get updated. Otherwise we update only the direct
call. */
- void set_call_stmt_including_clones (gimple old_stmt, gcall *new_stmt,
+ void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
bool update_speculative = true);
/* Walk the alias chain to return the function cgraph_node is alias of.
same function body. If clones already have an edge for OLD_STMT, only
update the edge the same way as cgraph_set_call_stmt_including_clones does. */
void create_edge_including_clones (cgraph_node *callee,
- gimple old_stmt, gcall *stmt,
+ gimple *old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason);
/* Return the callgraph edge representing the GIMPLE_CALL statement
CALL_STMT. */
- cgraph_edge *get_edge (gimple call_stmt);
+ cgraph_edge *get_edge (gimple *call_stmt);
/* Collect all callers of cgraph_node and its aliases that are known to lead
to NODE (i.e. are not overwritable). */
/* Build context for pointer REF contained in FNDECL at statement STMT.
If INSTANCE is non-NULL, return a pointer to the object described by
the context. */
- ipa_polymorphic_call_context (tree fndecl, tree ref, gimple stmt,
+ ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
tree *instance = NULL);
/* Look for vtable stores or constructor calls to work out dynamic type
of memory location. */
- bool get_dynamic_type (tree, tree, tree, gimple);
+ bool get_dynamic_type (tree, tree, tree, gimple *);
/* Make context non-speculative. */
void clear_speculation ();
/* If necessary, change the function declaration in the call statement
associated with the edge so that it corresponds to the edge callee. */
- gimple redirect_call_stmt_to_callee (void);
+ gimple *redirect_call_stmt_to_callee (void);
/* Create clone of edge in the node N represented
by CALL_EXPR the callgraph. */
void release_function_body (tree);
cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
-void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
+void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
bool cgraph_function_possibly_inlined_p (tree);
const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
-extern bool gimple_check_call_matching_types (gimple, tree, bool);
+extern bool gimple_check_call_matching_types (gimple *, tree, bool);
/* In cgraphunit.c */
void cgraphunit_c_finalize (void);
/* Mark address taken in STMT. */
static bool
-mark_address (gimple stmt, tree addr, tree, void *data)
+mark_address (gimple *stmt, tree addr, tree, void *data)
{
addr = get_base_address (addr);
if (TREE_CODE (addr) == FUNCTION_DECL)
/* Mark load of T. */
static bool
-mark_load (gimple stmt, tree t, tree, void *data)
+mark_load (gimple *stmt, tree t, tree, void *data)
{
t = get_base_address (t);
if (t && TREE_CODE (t) == FUNCTION_DECL)
/* Mark store of T. */
static bool
-mark_store (gimple stmt, tree t, tree, void *data)
+mark_store (gimple *stmt, tree t, tree, void *data)
{
t = get_base_address (t);
if (t && TREE_CODE (t) == VAR_DECL
/* Record all references from cgraph_node that are taken in statement STMT. */
void
-cgraph_node::record_stmt_references (gimple stmt)
+cgraph_node::record_stmt_references (gimple *stmt)
{
walk_stmt_load_store_addr_ops (stmt, this, mark_load, mark_store,
mark_address);
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl;
if (is_gimple_debug (stmt))
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl;
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
call. */
void
-cgraph_node::set_call_stmt_including_clones (gimple old_stmt,
+cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
gcall *new_stmt,
bool update_speculative)
{
void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
- gimple old_stmt, gcall *stmt,
+ gimple *old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason)
{
tmp = create_tmp_reg (TYPE_MAIN_VARIANT
(TREE_TYPE (arg)), "arg");
- gimple stmt = gimple_build_assign (tmp, arg);
+ gimple *stmt = gimple_build_assign (tmp, arg);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
}
vargs.quick_push (tmp);
if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
{
- gimple stmt;
+ gimple *stmt;
edge e;
/* If the return type is a pointer, we need to
protect against NULL. We know there will be an
fixed_offset, virtual_offset);
if (true_label)
{
- gimple stmt;
+ gimple *stmt;
bsi = gsi_last_bb (else_bb);
stmt = gimple_build_assign (restmp,
build_zero_cst (TREE_TYPE (restmp)));
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
bool changed = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree call = gimple_call_fn (stmt);
tree fndecl;
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
if (call)
{
/* Helper function for alpha_stdarg_optimize_hook. Skip over casts
and constant additions. */
-static gimple
+static gimple *
va_list_skip_additions (tree lhs)
{
- gimple stmt;
+ gimple *stmt;
for (;;)
{
current statement. */
static bool
-alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
+alpha_stdarg_optimize_hook (struct stdarg_info *si, const gimple *stmt)
{
tree base, offset, rhs;
int offset_arg = 1;
- gimple base_stmt;
+ gimple *base_stmt;
if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
!= GIMPLE_SINGLE_RHS)
offset = gimple_op (stmt, 1 + offset_arg);
if (TREE_CODE (offset) == SSA_NAME)
{
- gimple offset_stmt = va_list_skip_additions (offset);
+ gimple *offset_stmt = va_list_skip_additions (offset);
if (offset_stmt
&& gimple_code (offset_stmt) == GIMPLE_PHI)
{
HOST_WIDE_INT sub;
- gimple arg1_stmt, arg2_stmt;
+ gimple *arg1_stmt, *arg2_stmt;
tree arg1, arg2;
enum tree_code code1, code2;
else if (code2 == COMPONENT_REF
&& (code1 == MINUS_EXPR || code1 == PLUS_EXPR))
{
- gimple tem = arg1_stmt;
+ gimple *tem = arg1_stmt;
code2 = code1;
arg1_stmt = arg2_stmt;
arg2_stmt = tem;
alpha_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
bool changed = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree call = gimple_call_fn (stmt);
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
if (call)
{
add_condition_to_bb (tree function_decl, tree version_decl,
tree predicate_chain, basic_block new_bb)
{
- gimple return_stmt;
+ gimple *return_stmt;
tree convert_expr, result_var;
- gimple convert_stmt;
- gimple call_cond_stmt;
- gimple if_else_stmt;
+ gimple *convert_stmt;
+ gimple *call_cond_stmt;
+ gimple *if_else_stmt;
basic_block bb1, bb2, bb3;
edge e12, e23;
and_expr_var = cond_var;
else
{
- gimple assign_stmt;
+ gimple *assign_stmt;
/* Use MIN_EXPR to check if any integer is zero:
and_expr_var = min_expr <cond_var, and_expr_var> */
assign_stmt = gimple_build_assign (and_expr_var,
basic_block *empty_bb)
{
tree default_decl;
- gimple ifunc_cpu_init_stmt;
+ gimple *ifunc_cpu_init_stmt;
gimple_seq gseq;
int ix;
tree ele;
as that is a cheaper way to load all ones into
a register than having to load a constant from
memory. */
- gimple def_stmt = SSA_NAME_DEF_STMT (arg3);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg3);
if (is_gimple_call (def_stmt))
{
tree fndecl = gimple_call_fndecl (def_stmt);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!STMT_VINFO_RELEVANT_P (stmt_info)
int align;
tree ptrtype = build_pointer_type_for_mode (type, ptr_mode, true);
int regalign = 0;
- gimple stmt;
+ gimple *stmt;
if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
{
union tree_node;
typedef union tree_node *tree;
typedef const union tree_node *const_tree;
-typedef struct gimple_statement_base *gimple;
-typedef const struct gimple_statement_base *const_gimple;
-typedef gimple gimple_seq;
+struct gimple;
+typedef gimple *gimple_seq;
struct gimple_stmt_iterator;
/* Forward decls for leaf gimple subclasses (for individual gimple codes).
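One consequence of the new coretypes.h declarations above: a gimple_seq
is now literally a pointer to its first statement, with statements
chained through their own next/prev fields (visible in the gsi_set_stmt
hunk further down). A hedged sketch:

    gimple_seq seq = NULL;              /* the empty sequence       */
    gimple *g = gimple_build_nop ();    /* any statement will do    */
    gimple_seq_add_stmt (&seq, g);      /* seq now points at g      */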
+2015-09-19 Trevor Saunders <tbsaunde@tbsaunde.org>
+
+ * cp-gimplify.c (gimplify_must_not_throw_expr): Adjust.
+
2015-09-18 Ville Voutilainen <ville.voutilainen@gmail.com>
Implement nested namespace definitions.
tree body = TREE_OPERAND (stmt, 0);
gimple_seq try_ = NULL;
gimple_seq catch_ = NULL;
- gimple mnt;
+ gimple *mnt;
gimplify_and_add (body, &try_);
mnt = gimple_build_eh_must_not_throw (terminate_node);
EXTRA_DUMP_FLAGS on the dump streams if DUMP_KIND is enabled. */
void
-dump_gimple_stmt (int dump_kind, int extra_dump_flags, gimple gs, int spc)
+dump_gimple_stmt (int dump_kind, int extra_dump_flags, gimple *gs, int spc)
{
if (dump_file && (dump_kind & pflags))
print_gimple_stmt (dump_file, gs, spc, dump_flags | extra_dump_flags);
void
dump_gimple_stmt_loc (int dump_kind, source_location loc, int extra_dump_flags,
- gimple gs, int spc)
+ gimple *gs, int spc)
{
if (dump_file && (dump_kind & pflags))
{
extern void dump_basic_block (int, basic_block, int);
extern void dump_generic_expr_loc (int, source_location, int, tree);
extern void dump_generic_expr (int, int, tree);
-extern void dump_gimple_stmt_loc (int, source_location, int, gimple, int);
-extern void dump_gimple_stmt (int, int, gimple, int);
+extern void dump_gimple_stmt_loc (int, source_location, int, gimple *, int);
+extern void dump_gimple_stmt (int, int, gimple *, int);
extern void print_combine_total_stats (void);
extern bool enable_rtl_dump_file (void);
}
void
-set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
+set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
fun->eh->throw_stmt_table = table;
}
-hash_map<gimple, int> *
+hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
return fun->eh->throw_stmt_table;
/* At the gimple level, a mapping from gimple statement to landing pad
or must-not-throw region. See record_stmt_eh_region. */
- hash_map<gimple, int> *GTY(()) throw_stmt_table;
+ hash_map<gimple *, int> *GTY(()) throw_stmt_table;
/* All of the runtime type data used by the function. These objects
are emitted to the lang-specific-data-area for the function. */
extern void finish_eh_generation (void);
struct GTY(()) throw_stmt_node {
- gimple stmt;
+ gimple *stmt;
int lp_nr;
};
-extern hash_map<gimple, int> *get_eh_throw_stmt_table (struct function *);
-extern void set_eh_throw_stmt_table (function *, hash_map<gimple, int> *);
+extern hash_map<gimple *, int> *get_eh_throw_stmt_table (struct function *);
+extern void set_eh_throw_stmt_table (function *, hash_map<gimple *, int> *);
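For orientation, a sketch of how this per-function table is typically
consulted; get is GCC's hash_map API, while the consumer shown is
hypothetical:

    hash_map<gimple *, int> *tbl = get_eh_throw_stmt_table (cfun);
    if (tbl)
      if (int *lp_nr = tbl->get (stmt))  /* landing-pad number for STMT */
        note_landing_pad (*lp_nr);       /* hypothetical consumer       */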
enum eh_personality_kind {
eh_personality_none,
assignment and the code of the expression on the RHS is CODE. Return
NULL otherwise. */
-static gimple
+static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (name) != SSA_NAME)
return NULL;
assignment and the class of the expression on the RHS is CLASS. Return
NULL otherwise. */
-static gimple
+static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (name) != SSA_NAME)
return NULL;
tree op0, op1;
rtx value, result;
optab binop;
- gimple srcstmt;
+ gimple *srcstmt;
enum tree_code code;
if (mode1 != VOIDmode
be from a bitfield load. */
if (TREE_CODE (op0) == SSA_NAME)
{
- gimple op0stmt = get_gimple_for_ssa_name (op0);
+ gimple *op0stmt = get_gimple_for_ssa_name (op0);
/* We want to eventually have OP0 be the same as TO, which
should be a bitfield. */
&& DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
{
rtx temp;
- gimple nop_def;
+ gimple *nop_def;
/* If EXP is a NOP_EXPR of precision less than its mode, then that
implies a mask operation. If the precision is the same size as
rtx op00, op01, op1, op2;
enum rtx_code comparison_code;
machine_mode comparison_mode;
- gimple srcstmt;
+ gimple *srcstmt;
rtx temp;
tree type = TREE_TYPE (treeop1);
int unsignedp = TYPE_UNSIGNED (type);
&& TYPE_MODE (TREE_TYPE (treeop0))
== TYPE_MODE (TREE_TYPE (treeop1)))
{
- gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
+ gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
if (def)
{
treeop1 = gimple_assign_rhs1 (def);
case FMA_EXPR:
{
optab opt = fma_optab;
- gimple def0, def2;
+ gimple *def0, *def2;
/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
call. */
&& TREE_CONSTANT (treeop1)
&& TREE_CODE (treeop0) == SSA_NAME)
{
- gimple def = SSA_NAME_DEF_STMT (treeop0);
+ gimple *def = SSA_NAME_DEF_STMT (treeop0);
if (is_gimple_assign (def)
&& gimple_assign_rhs_code (def) == NOP_EXPR)
{
into constant expressions. */
static bool
-stmt_is_replaceable_p (gimple stmt)
+stmt_is_replaceable_p (gimple *stmt)
{
if (ssa_is_replaceable_p (stmt))
{
struct separate_ops ops;
tree treeop0, treeop1, treeop2;
tree ssa_name = NULL_TREE;
- gimple g;
+ gimple *g;
type = TREE_TYPE (exp);
mode = TYPE_MODE (type);
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
machine_mode address_mode;
tree base = TREE_OPERAND (exp, 0);
- gimple def_stmt;
+ gimple *def_stmt;
enum insn_code icode;
unsigned align;
/* Handle expansion of non-aliased memory with non-BLKmode. That
&& integer_zerop (arg1)
&& (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
{
- gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
+ gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
if (srcstmt
&& integer_pow2p (gimple_assign_rhs2 (srcstmt)))
{
deferred code. */
void
-fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
+fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
const char *warnmsg;
location_t locus;
extern tree fold_abs_const (tree, tree);
extern tree fold_indirect_ref_1 (location_t, tree, tree);
extern void fold_defer_overflow_warnings (void);
-extern void fold_undefer_overflow_warnings (bool, const_gimple, int);
+extern void fold_undefer_overflow_warnings (bool, const gimple *, int);
extern void fold_undefer_and_ignore_overflow_warnings (void);
extern bool fold_deferring_overflow_warnings_p (void);
extern int operand_equal_p (const_tree, const_tree, unsigned int);
fprintf_indent (f, indent,
" {\n");
fprintf_indent (f, indent,
- " gimple def_stmt = SSA_NAME_DEF_STMT (%s);\n",
+ " gimple *def_stmt = SSA_NAME_DEF_STMT (%s);\n",
kid_opname);
indent += 6;
return (union tree_node *) ggc_internal_cleared_alloc (s PASS_MEM_STAT);
}
-static inline struct gimple_statement_base *
+static inline gimple *
ggc_alloc_cleared_gimple_statement_stat (size_t s CXX_MEM_STAT_INFO)
{
- return (struct gimple_statement_base *)
- ggc_internal_cleared_alloc (s PASS_MEM_STAT);
+ return (gimple *) ggc_internal_cleared_alloc (s PASS_MEM_STAT);
}
static inline void
}
gassign *
-build_assign (enum tree_code code, gimple g, int val, tree lhs )
+build_assign (enum tree_code code, gimple *g, int val, tree lhs )
{
return build_assign (code, gimple_assign_lhs (g), val, lhs);
}
}
gassign *
-build_assign (enum tree_code code, gimple op1, tree op2, tree lhs)
+build_assign (enum tree_code code, gimple *op1, tree op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), op2, lhs);
}
gassign *
-build_assign (enum tree_code code, tree op1, gimple op2, tree lhs)
+build_assign (enum tree_code code, tree op1, gimple *op2, tree lhs)
{
return build_assign (code, op1, gimple_assign_lhs (op2), lhs);
}
gassign *
-build_assign (enum tree_code code, gimple op1, gimple op2, tree lhs)
+build_assign (enum tree_code code, gimple *op1, gimple *op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), gimple_assign_lhs (op2),
lhs);
}
gassign *
-build_type_cast (tree to_type, gimple op, tree lhs)
+build_type_cast (tree to_type, gimple *op, tree lhs)
{
return build_type_cast (to_type, gimple_assign_lhs (op), lhs);
}
#define GCC_GIMPLE_BUILDER_H
gassign *build_assign (enum tree_code, tree, int, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, gimple, int, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple *, int, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, tree, tree, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, gimple, tree, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, tree, gimple, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, gimple, gimple, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple *, tree, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, tree, gimple *, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple *, gimple *,
+ tree lhs = NULL_TREE);
gassign *build_type_cast (tree, tree, tree lhs = NULL_TREE);
-gassign *build_type_cast (tree, gimple, tree lhs = NULL_TREE);
+gassign *build_type_cast (tree, gimple *, tree lhs = NULL_TREE);
#endif /* GCC_GIMPLE_BUILDER_H */
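A usage sketch for the gimple * overloads declared above; g1 and g2
stand for existing assignments, and with LHS left as NULL_TREE the
builder is assumed to create a fresh result name:

    gassign *sum = build_assign (PLUS_EXPR, g1, g2);  /* lhs(g1) + lhs(g2) */
    gassign *cast = build_type_cast (integer_type_node, sum);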
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
- gimple stmt = gsi_stmt (*si);
+ gimple *stmt = gsi_stmt (*si);
enum tree_code subcode = gimple_assign_rhs_code (stmt);
location_t loc = gimple_location (stmt);
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
- gimple stmt = gsi_stmt (*si_p);
+ gimple *stmt = gsi_stmt (*si_p);
if (gimple_has_location (stmt))
annotate_all_with_location (stmts, gimple_location (stmt));
/* First iterate over the replacement statements backward, assigning
virtual operands to their defining statements. */
- gimple laststore = NULL;
+ gimple *laststore = NULL;
for (gimple_stmt_iterator i = gsi_last (stmts);
!gsi_end_p (i); gsi_prev (&i))
{
- gimple new_stmt = gsi_stmt (i);
+ gimple *new_stmt = gsi_stmt (i);
if ((gimple_assign_single_p (new_stmt)
&& !is_gimple_reg (gimple_assign_lhs (new_stmt)))
|| (is_gimple_call (new_stmt)
for (gimple_stmt_iterator i = gsi_start (stmts);
!gsi_end_p (i); gsi_next (&i))
{
- gimple new_stmt = gsi_stmt (i);
+ gimple *new_stmt = gsi_stmt (i);
/* If the new statement possibly has a VUSE, update it with the exact SSA
name we know will reach this one. */
if (gimple_has_mem_ops (new_stmt))
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
tree lhs;
- gimple stmt, new_stmt;
+ gimple *stmt, *new_stmt;
gimple_stmt_iterator i;
gimple_seq stmts = NULL;
static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
- gimple repl;
+ gimple *repl;
if (lhs)
{
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
again. */
static void
-replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
+replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
gimple_set_location (repl, gimple_location (stmt));
if (gimple_vdef (stmt)
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
tree dest, tree src, int endp)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
tree len = gimple_call_arg (stmt, 2);
tree destvar, srcvar;
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
{
- gimple repl;
+ gimple *repl;
if (gimple_call_lhs (stmt))
repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
else
srcmem = NULL_TREE;
if (srcmem)
{
- gimple new_stmt;
+ gimple *new_stmt;
if (is_gimple_reg_type (TREE_TYPE (srcmem)))
{
new_stmt = gimple_build_assign (NULL_TREE, srcmem);
}
}
- gimple new_stmt;
+ gimple *new_stmt;
if (is_gimple_reg_type (TREE_TYPE (srcvar)))
{
new_stmt = gimple_build_assign (NULL_TREE, srcvar);
dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
GSI_SAME_STMT);
- gimple repl = gimple_build_assign (lhs, dest);
+ gimple *repl = gimple_build_assign (lhs, dest);
gsi_replace (gsi, repl, false);
return true;
}
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree etype;
unsigned HOST_WIDE_INT length, cval;
}
var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
- gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
+ gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
gimple_set_vuse (store, gimple_vuse (stmt));
tree vdef = gimple_vdef (stmt);
if (vdef && TREE_CODE (vdef) == SSA_NAME)
gsi_insert_before (gsi, store, GSI_SAME_STMT);
if (gimple_call_lhs (stmt))
{
- gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
+ gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
gsi_replace (gsi, asgn, false);
}
else
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
tree var, val;
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (arg) != SSA_NAME)
{
len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
len = force_gimple_operand_gsi (gsi, len, true,
NULL_TREE, true, GSI_SAME_STMT);
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
len = fold_convert_loc (loc, size_type_node, len);
len = force_gimple_operand_gsi (gsi, len, true,
NULL_TREE, true, GSI_SAME_STMT);
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
const char *p = c_getstr (src);
/* Create strlen (dst). */
gimple_seq stmts = NULL, stmts2;
- gimple repl = gimple_build_call (strlen_fn, 1, dst);
+ gimple *repl = gimple_build_call (strlen_fn, 1, dst);
gimple_set_location (repl, loc);
if (gimple_in_ssa_p (cfun))
newdst = make_ssa_name (size_type_node);
static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree dest = gimple_call_arg (stmt, 0);
tree src = gimple_call_arg (stmt, 1);
tree size = gimple_call_arg (stmt, 2);
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 2, dest, src);
+ gimple *repl = gimple_build_call (fn, 2, dest, src);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree dest = gimple_call_arg (stmt, 0);
tree src = gimple_call_arg (stmt, 1);
tree len = gimple_call_arg (stmt, 2);
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, size);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
tree arg0, tree arg1,
bool unlocked)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* If we're using an unlocked function, assume the other unlocked
functions exist explicitly. */
if (!fn_fputc)
return false;
- gimple repl = gimple_build_call (fn_fputc, 2,
+ gimple *repl = gimple_build_call (fn_fputc, 2,
build_int_cst
(integer_type_node, p[0]), arg1);
replace_call_with_call_and_fold (gsi, repl);
if (!fn_fwrite)
return false;
- gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
+ gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
size_one_node, len, arg1);
replace_call_with_call_and_fold (gsi, repl);
return true;
tree dest, tree src, tree len, tree size,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
tree fn;
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
+ gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
tree src, tree size,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
tree len, fn;
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, size);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
build_int_cst (size_type_node, 1));
len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
true, GSI_SAME_STMT);
- gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
+ gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 2, dest, src);
+ gimple *repl = gimple_build_call (fn, 2, dest, src);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
tree len, tree size,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
tree fn;
fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
if (fn)
{
- gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
+ gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree dest = gimple_call_arg (stmt, 0);
tree fmt = gimple_call_arg (stmt, 1);
tree orig = NULL_TREE;
/* Convert sprintf (str, fmt) into strcpy (str, fmt) when
'format' is known to contain no % formats. */
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, fmt);
+ gimple *repl = gimple_build_call (fn, 2, dest, fmt);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
/* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, orig);
+ gimple *repl = gimple_build_call (fn, 2, dest, orig);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
return false;
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, fmt);
+ gimple *repl = gimple_build_call (fn, 2, dest, fmt);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
/* Convert snprintf (str1, cst, "%s", str2) into
strcpy (str1, str2) if strlen (str2) < cst. */
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, orig);
+ gimple *repl = gimple_build_call (fn, 2, dest, orig);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
if (!len)
return false;
{
tree var = create_tmp_var (TREE_TYPE (lhs));
tree def = get_or_create_ssa_default_def (cfun, var);
- gimple new_stmt = gimple_build_assign (lhs, def);
+ gimple *new_stmt = gimple_build_assign (lhs, def);
gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
}
gimple_call_set_lhs (stmt, NULL_TREE);
else
{
tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
- gimple new_stmt = gimple_build_call (fndecl, 0);
+ gimple *new_stmt = gimple_build_call (fndecl, 0);
gimple_set_location (new_stmt, gimple_location (stmt));
if (lhs && TREE_CODE (lhs) == SSA_NAME)
{
/* Return true if NAME has a use on STMT. */
static bool
-has_use_on_stmt (tree name, gimple stmt)
+has_use_on_stmt (tree name, gimple *stmt)
{
imm_use_iterator iter;
use_operand_p use_p;
code_helper rcode, tree *ops,
gimple_seq *seq, bool inplace)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* Play safe and do not allow abnormals to be mentioned in
newly created statements. See also maybe_push_res_to_seq.
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
bool changed = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
unsigned i;
/* First do required canonicalization of [TARGET_]MEM_REF addresses
bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
gcc_assert (gsi_stmt (*gsi) == stmt);
return changed;
same_bool_comparison_p (const_tree expr, enum tree_code code,
const_tree op1, const_tree op2)
{
- gimple s;
+ gimple *s;
/* The obvious case. */
if (TREE_CODE (expr) == code
and_var_with_comparison (tree var, bool invert,
enum tree_code code2, tree op2a, tree op2b);
static tree
-and_var_with_comparison_1 (gimple stmt,
+and_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
or_var_with_comparison (tree var, bool invert,
enum tree_code code2, tree op2a, tree op2b);
static tree
-or_var_with_comparison_1 (gimple stmt,
+or_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
enum tree_code code2, tree op2a, tree op2b)
{
tree t;
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
/* We can only deal with variables whose definitions are assignments. */
if (!is_gimple_assign (stmt))
Return NULL_EXPR if we can't simplify this to a single expression. */
static tree
-and_var_with_comparison_1 (gimple stmt,
+and_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b)
{
tree var = gimple_assign_lhs (stmt);
{
tree inner1 = gimple_assign_rhs1 (stmt);
tree inner2 = gimple_assign_rhs2 (stmt);
- gimple s;
+ gimple *s;
tree t;
tree partial = NULL_TREE;
bool is_and = (innercode == BIT_AND_EXPR);
{
bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
|| (code1 == NE_EXPR && integer_onep (op1b)));
- gimple stmt = SSA_NAME_DEF_STMT (op1a);
+ gimple *stmt = SSA_NAME_DEF_STMT (op1a);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
&& !SSA_NAME_IS_DEFAULT_DEF (arg))
{
tree temp;
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
/* In simple cases we can look through PHI nodes,
but we have to be careful with loops.
See PR49073. */
enum tree_code code2, tree op2a, tree op2b)
{
tree t;
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
/* We can only deal with variables whose definitions are assignments. */
if (!is_gimple_assign (stmt))
Return NULL_EXPR if we can't simplify this to a single expression. */
static tree
-or_var_with_comparison_1 (gimple stmt,
+or_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b)
{
tree var = gimple_assign_lhs (stmt);
{
tree inner1 = gimple_assign_rhs1 (stmt);
tree inner2 = gimple_assign_rhs2 (stmt);
- gimple s;
+ gimple *s;
tree t;
tree partial = NULL_TREE;
bool is_or = (innercode == BIT_IOR_EXPR);
{
bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
|| (code1 == NE_EXPR && integer_onep (op1b)));
- gimple stmt = SSA_NAME_DEF_STMT (op1a);
+ gimple *stmt = SSA_NAME_DEF_STMT (op1a);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
&& !SSA_NAME_IS_DEFAULT_DEF (arg))
{
tree temp;
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
/* In simple cases we can look through PHI nodes,
but we have to be careful with loops.
See PR49073. */
to avoid the indirect function call overhead. */
tree
-gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
+gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
tree (*gvalueize) (tree))
{
code_helper rcode;
returns a constant according to is_gimple_min_invariant. */
tree
-gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
+gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
if (res && is_gimple_min_invariant (res))
bool
gimple_val_nonnegative_real_p (tree val)
{
- gimple def_stmt;
+ gimple *def_stmt;
gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
a modified form of STMT itself. */
gimple_seq
-rewrite_to_defined_overflow (gimple stmt)
+rewrite_to_defined_overflow (gimple *stmt)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
gimple_seq_add_stmt (&stmts, stmt);
- gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
+ gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
gimple_seq_add_stmt (&stmts, cvt);
return stmts;
res = make_ssa_name (type);
else
res = create_tmp_reg (type);
- gimple stmt;
+ gimple *stmt;
if (code == REALPART_EXPR
|| code == IMAGPART_EXPR
|| code == VIEW_CONVERT_EXPR)
res = make_ssa_name (type);
else
res = create_tmp_reg (type);
- gimple stmt = gimple_build_assign (res, code, op0, op1);
+ gimple *stmt = gimple_build_assign (res, code, op0, op1);
gimple_set_location (stmt, loc);
gimple_seq_add_stmt_without_update (seq, stmt);
}
res = make_ssa_name (type);
else
res = create_tmp_reg (type);
- gimple stmt;
+ gimple *stmt;
if (code == BIT_FIELD_REF)
stmt = gimple_build_assign (res, code,
build3 (code, type, op0, op1, op2));
if (!res)
{
tree decl = builtin_decl_implicit (fn);
- gimple stmt = gimple_build_call (decl, 1, arg0);
+ gimple *stmt = gimple_build_call (decl, 1, arg0);
if (!VOID_TYPE_P (type))
{
if (gimple_in_ssa_p (cfun))
if (!res)
{
tree decl = builtin_decl_implicit (fn);
- gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
+ gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
if (!VOID_TYPE_P (type))
{
if (gimple_in_ssa_p (cfun))
if (!res)
{
tree decl = builtin_decl_implicit (fn);
- gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
+ gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
if (!VOID_TYPE_P (type))
{
if (gimple_in_ssa_p (cfun))
const_tree);
extern tree no_follow_ssa_edges (tree);
extern tree follow_single_use_edges (tree);
-extern tree gimple_fold_stmt_to_constant_1 (gimple, tree (*) (tree),
+extern tree gimple_fold_stmt_to_constant_1 (gimple *, tree (*) (tree),
tree (*) (tree) = no_follow_ssa_edges);
-extern tree gimple_fold_stmt_to_constant (gimple, tree (*) (tree));
+extern tree gimple_fold_stmt_to_constant (gimple *, tree (*) (tree));
extern tree fold_ctor_reference (tree, tree, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, tree);
extern tree fold_const_aggregate_ref_1 (tree, tree (*) (tree));
extern bool gimple_val_nonnegative_real_p (tree);
extern tree gimple_fold_indirect_ref (tree);
extern bool arith_code_with_undefined_signed_overflow (tree_code);
-extern gimple_seq rewrite_to_defined_overflow (gimple);
+extern gimple_seq rewrite_to_defined_overflow (gimple *);
/* gimple_build, functionally matching fold_buildN, outputs stmts
in the provided sequence, matching and simplifying them on-the-fly.
/* Mark the statement STMT as modified, and update it. */
static inline void
-update_modified_stmt (gimple stmt)
+update_modified_stmt (gimple *stmt)
{
if (!ssa_operands_active (cfun))
return;
of gsi_replace. */
void
-gsi_set_stmt (gimple_stmt_iterator *gsi, gimple stmt)
+gsi_set_stmt (gimple_stmt_iterator *gsi, gimple *stmt)
{
- gimple orig_stmt = gsi_stmt (*gsi);
- gimple prev, next;
+ gimple *orig_stmt = gsi_stmt (*gsi);
+ gimple *prev, *next;
stmt->next = next = orig_stmt->next;
stmt->prev = prev = orig_stmt->prev;
cleanup is required. */
bool
-gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
+gsi_replace (gimple_stmt_iterator *gsi, gimple *stmt, bool update_eh_info)
{
- gimple orig_stmt = gsi_stmt (*gsi);
+ gimple *orig_stmt = gsi_stmt (*gsi);
bool require_eh_edge_purge = false;
if (stmt == orig_stmt)
bool update_eh_info)
{
gimple_stmt_iterator seqi;
- gimple last;
+ gimple *last;
if (gimple_seq_empty_p (seq))
{
gsi_remove (gsi, true);
should use gsi_insert_before. */
void
-gsi_insert_before_without_update (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_before_without_update (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
gsi_insert_seq_nodes_before (i, stmt, stmt, m);
gsi_iterator_update). */
void
-gsi_insert_before (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_before (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
update_modified_stmt (stmt);
should use gsi_insert_after. */
void
-gsi_insert_after_without_update (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_after_without_update (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
gsi_insert_seq_nodes_after (i, stmt, stmt, m);
gsi_iterator_update). */
void
-gsi_insert_after (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_after (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
update_modified_stmt (stmt);
gsi_remove (gimple_stmt_iterator *i, bool remove_permanently)
{
gimple_seq_node cur, next, prev;
- gimple stmt = gsi_stmt (*i);
+ gimple *stmt = gsi_stmt (*i);
bool require_eh_edge_purge = false;
if (gimple_code (stmt) != GIMPLE_PHI)
/* Finds iterator for STMT. */
gimple_stmt_iterator
-gsi_for_stmt (gimple stmt)
+gsi_for_stmt (gimple *stmt)
{
gimple_stmt_iterator i;
basic_block bb = gimple_bb (stmt);
void
gsi_move_after (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
{
- gimple stmt = gsi_stmt (*from);
+ gimple *stmt = gsi_stmt (*from);
gsi_remove (from, false);
/* We must have GSI_NEW_STMT here, as gsi_move_after is sometimes used to
void
gsi_move_before (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
{
- gimple stmt = gsi_stmt (*from);
+ gimple *stmt = gsi_stmt (*from);
gsi_remove (from, false);
/* For consistency with gsi_move_after, it might be better to have
made until a call to gsi_commit_edge_inserts () is made. */
void
-gsi_insert_on_edge (edge e, gimple stmt)
+gsi_insert_on_edge (edge e, gimple *stmt)
{
gimple_seq_add_stmt (&PENDING_STMT (e), stmt);
}
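
Edge insertions are queued and only materialized by a later commit, e.g.
(sketch; LHS and RHS are hypothetical):

  gsi_insert_on_edge (e, gimple_build_assign (lhs, rhs));
  /* ... queue more statements on other edges ... */
  gsi_commit_edge_inserts ();	/* splits edges where necessary */
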
basic_block *new_bb)
{
basic_block dest, src;
- gimple tmp;
+ gimple *tmp;
dest = e->dest;
block has to be created, it is returned. */
basic_block
-gsi_insert_on_edge_immediate (edge e, gimple stmt)
+gsi_insert_on_edge_immediate (edge e, gimple *stmt)
{
gimple_stmt_iterator gsi;
basic_block new_bb = NULL;
extern void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq,
enum gsi_iterator_update);
extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator);
-extern void gsi_set_stmt (gimple_stmt_iterator *, gimple);
+extern void gsi_set_stmt (gimple_stmt_iterator *, gimple *);
extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *);
-extern bool gsi_replace (gimple_stmt_iterator *, gimple, bool);
+extern bool gsi_replace (gimple_stmt_iterator *, gimple *, bool);
extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, bool);
-extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
-extern void gsi_insert_before (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_before (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
-extern void gsi_insert_after_without_update (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_after_without_update (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
-extern void gsi_insert_after (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_after (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
extern bool gsi_remove (gimple_stmt_iterator *, bool);
-extern gimple_stmt_iterator gsi_for_stmt (gimple);
+extern gimple_stmt_iterator gsi_for_stmt (gimple *);
extern gphi_iterator gsi_for_phi (gphi *);
extern void gsi_move_after (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_before (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_to_bb_end (gimple_stmt_iterator *, basic_block);
-extern void gsi_insert_on_edge (edge, gimple);
+extern void gsi_insert_on_edge (edge, gimple *);
extern void gsi_insert_seq_on_edge (edge, gimple_seq);
-extern basic_block gsi_insert_on_edge_immediate (edge, gimple);
+extern basic_block gsi_insert_on_edge_immediate (edge, gimple *);
extern basic_block gsi_insert_seq_on_edge_immediate (edge, gimple_seq);
extern void gsi_commit_edge_inserts (void);
extern void gsi_commit_one_edge_insert (edge, basic_block *);
static inline void
gsi_prev (gimple_stmt_iterator *i)
{
- gimple prev = i->ptr->prev;
+ gimple *prev = i->ptr->prev;
if (prev->next)
i->ptr = prev;
else
/* Return the current stmt. */
-static inline gimple
+static inline gimple *
gsi_stmt (gimple_stmt_iterator i)
{
return i.ptr;
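
A reverse walk over a block, using gsi_prev and the now pointer-typed
gsi_stmt (sketch):

  for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
       gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      /* ... visit statements last-to-first ... */
    }
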
{
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_assign (stmt)
|| gimple_assign_rhs_code (stmt) != ADDR_EXPR
|| is_gimple_invariant_address (gimple_assign_rhs1 (stmt)))
base = build_fold_addr_expr (base);
base = force_gimple_operand_gsi (&gsi, base, true, NULL,
true, GSI_SAME_STMT);
- gimple g = gimple_build_assign (gimple_assign_lhs (stmt),
+ gimple *g = gimple_build_assign (gimple_assign_lhs (stmt),
POINTER_PLUS_EXPR, base, offset);
gsi_replace (&gsi, g, false);
}
gimple_seq body = gimple_body (current_function_decl);
gimple_seq lowered_body;
gimple_stmt_iterator i;
- gimple bind;
- gimple x;
+ gimple *bind;
+ gimple *x;
/* The gimplifier should've left a body of exactly one statement,
namely a GIMPLE_BIND. */
static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt;
+ gimple *stmt;
stmt = gsi_stmt (*gsi);
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_set_block (stmt, data->block);
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
bool cannot_fallthru;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_stmt_iterator i;
/* We don't handle GIMPLE_TRY_FINALLY. */
we'll just delete the extra code later. */
bool
-gimple_stmt_may_fallthru (gimple stmt)
+gimple_stmt_may_fallthru (gimple *stmt)
{
if (!stmt)
return true;
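
A typical query (sketch; last_stmt is the tree-cfg helper returning the
final statement of BB, or NULL):

  gimple *last = last_stmt (bb);
  if (!last || gimple_stmt_may_fallthru (last))
    {
      /* Control may flow past the end of BB.  */
    }
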
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
- gimple t;
+ gimple *t;
int i;
return_statements_t tmp_rs;
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree cont_label = create_artificial_label (loc);
tree next_label = create_artificial_label (loc);
tree dest, t, arg;
- gimple g;
+ gimple *g;
/* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
these builtins are modelled as non-local label jumps to the label
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
- gimple stmt, call = gsi_stmt (*gsi);
+ gimple *stmt, *call = gsi_stmt (*gsi);
tree pptr = gimple_call_arg (call, 0);
tree align = gimple_call_arg (call, 1);
tree res = gimple_call_lhs (call);
}
tree align_label = create_artificial_label (UNKNOWN_LOCATION);
tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
+ gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
align_label, noalign_label);
gsi_insert_after (gsi, cond, GSI_NEW_STMT);
gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
#ifndef GCC_GIMPLE_LOW_H
#define GCC_GIMPLE_LOW_H
-extern bool gimple_stmt_may_fallthru (gimple);
+extern bool gimple_stmt_may_fallthru (gimple *);
extern bool gimple_seq_may_fallthru (gimple_seq);
extern void record_vars_into (tree, tree);
extern void record_vars (tree);
if (!res)
res = make_ssa_name (type);
maybe_build_generic_op (rcode, type, &ops[0], ops[1], ops[2]);
- gimple new_stmt = gimple_build_assign (res, rcode,
+ gimple *new_stmt = gimple_build_assign (res, rcode,
ops[0], ops[1], ops[2]);
gimple_seq_add_stmt_without_update (seq, new_stmt);
return res;
gcc_assert (nargs != 0);
if (!res)
res = make_ssa_name (type);
- gimple new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
+ gimple *new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
gimple_call_set_lhs (new_stmt, res);
gimple_seq_add_stmt_without_update (seq, new_stmt);
return res;
and the fold_stmt_to_constant APIs. */
bool
-gimple_simplify (gimple stmt,
+gimple_simplify (gimple *stmt,
code_helper *rcode, tree *ops,
gimple_seq *seq,
tree (*valueize)(tree), tree (*top_valueize)(tree))
int rep;
};
-bool gimple_simplify (gimple, code_helper *, tree *, gimple_seq *,
+bool gimple_simplify (gimple *, code_helper *, tree *, gimple_seq *,
tree (*)(tree), tree (*)(tree));
tree maybe_push_res_to_seq (code_helper, tree, tree *,
gimple_seq *, tree res = NULL_TREE);
/* Return the predictor of GIMPLE_PREDICT statement GS. */
static inline enum br_predictor
-gimple_predict_predictor (gimple gs)
+gimple_predict_predictor (gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
return (enum br_predictor) (gs->subcode & ~GF_PREDICT_TAKEN);
/* Set the predictor of GIMPLE_PREDICT statement GS to PREDICT. */
static inline void
-gimple_predict_set_predictor (gimple gs, enum br_predictor predictor)
+gimple_predict_set_predictor (gimple *gs, enum br_predictor predictor)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
gs->subcode = (gs->subcode & GF_PREDICT_TAKEN)
/* Return the outcome of GIMPLE_PREDICT statement GS. */
static inline enum prediction
-gimple_predict_outcome (gimple gs)
+gimple_predict_outcome (gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
return (gs->subcode & GF_PREDICT_TAKEN) ? TAKEN : NOT_TAKEN;
/* Set the outcome of GIMPLE_PREDICT statement GS to OUTCOME. */
static inline void
-gimple_predict_set_outcome (gimple gs, enum prediction outcome)
+gimple_predict_set_outcome (gimple *gs, enum prediction outcome)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
if (outcome == TAKEN)
/* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
-inline gimple
+inline gimple *
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
- gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
+ gimple *p = gimple_alloc (GIMPLE_PREDICT, 0);
/* Ensure all the predictors fit into the lower bits of the subcode. */
gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
gimple_predict_set_predictor (p, predictor);
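
Usage stays a one-liner (sketch; PRED_CONTINUE stands for any predictor
from predict.def, SEQ for a sequence being built):

  gimple *p = gimple_build_predict (PRED_CONTINUE, NOT_TAKEN);
  gimple_seq_add_stmt (&seq, p);
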
gimple statement GS. */
static void
-do_niy (pretty_printer *buffer, gimple gs)
+do_niy (pretty_printer *buffer, gimple *gs)
{
pp_printf (buffer, "<<< Unknown GIMPLE statement: %s >>>\n",
gimple_code_name[(int) gimple_code (gs)]);
/* Print the GIMPLE statement GS on stderr. */
DEBUG_FUNCTION void
-debug_gimple_stmt (gimple gs)
+debug_gimple_stmt (gimple *gs)
{
print_gimple_stmt (stderr, gs, 0, TDF_VOPS|TDF_MEMSYMS);
}
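
The printer is also usable from pass code, not just the debugger
(sketch):

  if (dump_file)
    print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
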
FLAGS as in pp_gimple_stmt_1. */
void
-print_gimple_stmt (FILE *file, gimple g, int spc, int flags)
+print_gimple_stmt (FILE *file, gimple *g, int spc, int flags)
{
pretty_printer buffer;
pp_needs_newline (&buffer) = true;
}
DEBUG_FUNCTION void
-debug (gimple_statement_base &ref)
+debug (gimple &ref)
{
print_gimple_stmt (stderr, &ref, 0, 0);
}
DEBUG_FUNCTION void
-debug (gimple_statement_base *ptr)
+debug (gimple *ptr)
{
if (ptr)
debug (*ptr);
of the statement. */
void
-print_gimple_expr (FILE *file, gimple g, int spc, int flags)
+print_gimple_expr (FILE *file, gimple *g, int spc, int flags)
{
flags |= TDF_RHS_ONLY;
pretty_printer buffer;
for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
{
- gimple gs = gsi_stmt (i);
+ gimple *gs = gsi_stmt (i);
INDENT (spc);
pp_gimple_stmt_1 (buffer, gs, spc, flags);
if (!gsi_one_before_end_p (i))
{
gimple_seq seq;
tree t;
- gimple g;
+ gimple *g;
switch (*++c)
{
case 'G':
- g = va_arg (args, gimple);
+ g = va_arg (args, gimple *);
tmp = gimple_code_name[gimple_code (g)];
pp_string (buffer, tmp);
break;
pretty_printer BUFFER. */
static void
-dump_gimple_omp_block (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_block (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S> >", gs,
/* Dump a GIMPLE_OMP_RETURN tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_return (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_return (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
FLAGS are as in pp_gimple_stmt_1. */
static void
-dump_gimple_mem_ops (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_mem_ops (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
tree vdef = gimple_vdef (gs);
tree vuse = gimple_vuse (gs);
pp_flush on BUFFER to finalize the pretty printer. */
void
-pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
+pp_gimple_stmt_1 (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
if (!gs)
return;
}
else
{
- gimple stmt = first_stmt (bb);
+ gimple *stmt = first_stmt (bb);
if (!stmt || gimple_code (stmt) != GIMPLE_LABEL)
fprintf (outf, "%*s<bb %d>:\n", indent, "", bb->index);
}
static void
pp_cfg_jump (pretty_printer *buffer, basic_block bb)
{
- gimple stmt;
+ gimple *stmt;
stmt = first_stmt (bb);
int flags)
{
edge e;
- gimple stmt;
+ gimple *stmt;
stmt = last_stmt (bb);
int flags)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
int label_indent = indent - 2;
if (label_indent < 0)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
pp_bar (pp);
pp_write_text_to_stream (pp);
pp_gimple_stmt_1 (pp, stmt, 0, dump_flags);
#include "tree-pretty-print.h"
/* In gimple-pretty-print.c */
-extern void debug_gimple_stmt (gimple);
+extern void debug_gimple_stmt (gimple *);
extern void debug_gimple_seq (gimple_seq);
extern void print_gimple_seq (FILE *, gimple_seq, int, int);
-extern void print_gimple_stmt (FILE *, gimple, int, int);
-extern void debug (gimple_statement_base &ref);
-extern void debug (gimple_statement_base *ptr);
-extern void print_gimple_expr (FILE *, gimple, int, int);
-extern void pp_gimple_stmt_1 (pretty_printer *, gimple, int, int);
+extern void print_gimple_stmt (FILE *, gimple *, int, int);
+extern void debug (gimple &ref);
+extern void debug (gimple *ptr);
+extern void print_gimple_expr (FILE *, gimple *, int, int);
+extern void pp_gimple_stmt_1 (pretty_printer *, gimple *, int, int);
extern void gimple_dump_bb (FILE *, basic_block, int, int);
extern void gimple_dump_bb_for_graph (pretty_printer *, basic_block);
This routine only makes a superficial check for a dereference. Thus,
it must only be used if it is safe to return a false negative. */
static bool
-check_loadstore (gimple stmt, tree op, tree, void *data)
+check_loadstore (gimple *stmt, tree op, tree, void *data)
{
if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
&& operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
If the dereference is a store and we can easily transform the RHS,
then simplify the RHS to enable more DCE. Note that we require the
statement to be a GIMPLE_ASSIGN which filters out calls on the RHS. */
- gimple stmt = gsi_stmt (*si_p);
+ gimple *stmt = gsi_stmt (*si_p);
if (walk_stmt_load_store_ops (stmt, (void *)op, NULL, check_loadstore)
&& is_gimple_assign (stmt)
&& INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
basic_block
isolate_path (basic_block bb, basic_block duplicate,
- edge e, gimple stmt, tree op, bool ret_zero)
+ edge e, gimple *stmt, tree op, bool ret_zero)
{
gimple_stmt_iterator si, si2;
edge_iterator ei;
tree op = gimple_phi_arg_def (phi, i);
edge e = gimple_phi_arg_edge (phi, i);
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
next_i = i + 1;
because of jump threading and constant propagation. */
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
/* By passing null_pointer_node, we can use the
infer_nonnull_range functions to detect explicit NULL
struct slsr_cand_d
{
/* The candidate statement S1. */
- gimple cand_stmt;
+ gimple *cand_stmt;
/* The base expression B: often an SSA name, but not always. */
tree base_expr;
};
/* Pointer map embodying a mapping from statements to candidates. */
-static hash_map<gimple, slsr_cand_t> *stmt_cand_map;
+static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
/* Obstack for candidates. */
static struct obstack cand_obstack;
a2[i + 20][j] = 2; */
static slsr_cand_t
-alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
+alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
const widest_int &index, tree stride, tree ctype,
unsigned savings)
{
to SPEED. */
static int
-stmt_cost (gimple gs, bool speed)
+stmt_cost (gimple *gs, bool speed)
{
tree lhs, rhs1, rhs2;
machine_mode lhs_mode;
{
slsr_cand_t *result;
- gimple def = SSA_NAME_DEF_STMT (base_in);
+ gimple *def = SSA_NAME_DEF_STMT (base_in);
if (!def)
return (slsr_cand_t) NULL;
/* Add an entry to the statement-to-candidate mapping. */
static void
-add_cand_for_stmt (gimple gs, slsr_cand_t c)
+add_cand_for_stmt (gimple *gs, slsr_cand_t c)
{
gcc_assert (!stmt_cand_map->put (gs, c));
}
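
Lookups mirror the insertion above (sketch of the hash_map API):

  slsr_cand_t *pcand = stmt_cand_map->get (gs);
  if (pcand)
    {
      /* GS already has a candidate, *pcand.  */
    }
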
slsr_cand_t arg_cand;
tree arg = gimple_phi_arg_def (phi, i);
tree derived_base_name = NULL_TREE;
- gimple arg_stmt = NULL;
+ gimple *arg_stmt = NULL;
basic_block arg_bb = NULL;
if (TREE_CODE (arg) != SSA_NAME)
the candidate table and attempt to find a basis. */
static void
-slsr_process_ref (gimple gs)
+slsr_process_ref (gimple *gs)
{
tree ref_expr, base, offset, type;
HOST_WIDE_INT bitsize, bitpos;
candidate. */
static slsr_cand_t
-create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
+create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
{
tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
widest_int index;
candidate. */
static slsr_cand_t
-create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
+create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
{
tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
widest_int index, temp;
find a basis. */
static void
-slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
+slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
{
slsr_cand_t c, c2;
Return the new candidate. */
static slsr_cand_t
-create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
+create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
bool subtract_p, bool speed)
{
tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
about BASE_IN into the new candidate. Return the new candidate. */
static slsr_cand_t
-create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
+create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
bool speed)
{
enum cand_kind kind = CAND_ADD;
make at least one appropriate entry in the candidate table. */
static void
-slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
+slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
{
bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
slsr_cand_t c = NULL, c2;
by -1. */
static void
-slsr_process_neg (gimple gs, tree rhs1, bool speed)
+slsr_process_neg (gimple *gs, tree rhs1, bool speed)
{
/* Record a CAND_MULT interpretation for the multiply by -1. */
slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
have different semantics. */
static bool
-legal_cast_p (gimple gs, tree rhs)
+legal_cast_p (gimple *gs, tree rhs)
{
if (!is_gimple_assign (gs)
|| !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
appropriate entry in the candidate table. */
static void
-slsr_process_cast (gimple gs, tree rhs1, bool speed)
+slsr_process_cast (gimple *gs, tree rhs1, bool speed)
{
tree lhs, ctype;
slsr_cand_t base_cand, c, c2;
propagation, such as DOM. */
static void
-slsr_process_copy (gimple gs, tree rhs1, bool speed)
+slsr_process_copy (gimple *gs, tree rhs1, bool speed)
{
slsr_cand_t base_cand, c, c2;
unsigned savings = 0;
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple gs = gsi_stmt (gsi);
+ gimple *gs = gsi_stmt (gsi);
if (gimple_vuse (gs) && gimple_assign_single_p (gs))
slsr_process_ref (gs);
{
enum tree_code code = PLUS_EXPR;
tree bump_tree;
- gimple stmt_to_print = NULL;
+ gimple *stmt_to_print = NULL;
/* If the basis name and the candidate's LHS have incompatible
types, introduce a cast. */
constant. */
static tree
-create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
+create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
location_t loc, bool known_stride)
{
int i;
}
else
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
/* If there is another phi along this incoming edge, we must
process it in the same fashion to ensure that all basis
for those phis as well. */
static int
-phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
+phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
{
unsigned i;
int cost = 0;
if (arg != phi_cand->base_expr)
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
cost += phi_add_costs (arg_def, c, one_add_cost);
savings to determine profitability. */
bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
int mult_savings = stmt_cost (c->cand_stmt, speed);
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
tree phi_result = gimple_phi_result (phi);
int one_add_cost = add_cost (speed,
TYPE_MODE (TREE_TYPE (phi_result)));
index of the basis. */
static void
-record_phi_increments (slsr_cand_t basis, gimple phi)
+record_phi_increments (slsr_cand_t basis, gimple *phi)
{
unsigned i;
slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
record_phi_increments (basis, arg_def);
uses. */
static int
-phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
+phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
+ int *savings)
{
unsigned i;
int cost = 0;
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
{
&& phi_dependent_cand_p (c)
&& !cand_already_replaced (c))
{
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
local_cost += phi_incr_cost (c, incr, phi, &savings);
if (has_single_use (gimple_phi_result (phi)))
&& !cand_already_replaced (c))
{
int phi_savings = 0;
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
savings -= phi_incr_cost (c, incr, phi, &phi_savings);
if (has_single_use (gimple_phi_result (phi)))
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
else
{
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
+ gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
are profitable to replace on behalf of candidate C. */
static bool
-all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
+all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
{
unsigned i;
slsr_cand_t basis = lookup_cand (c->basis);
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
{
If the replacement was made and we are doing a details dump,
return the revised statement, else NULL. */
-static gimple
+static gimple *
replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
enum tree_code old_code, tree old_rhs1, tree old_rhs2,
slsr_cand_t c)
static void
replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
{
- gimple stmt_to_print = NULL;
+ gimple *stmt_to_print = NULL;
tree orig_rhs1, orig_rhs2;
tree rhs2;
enum tree_code orig_code, repl_code;
{
if (phi_dependent_cand_p (c))
{
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
if (all_phi_incrs_profitable (c, phi))
{
cand_vec.create (128);
/* Allocate the mapping from statements to candidate indices. */
- stmt_cand_map = new hash_map<gimple, slsr_cand_t>;
+ stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
/* Create the obstack where candidate chains will reside. */
gcc_obstack_init (&chain_obstack);
or list of labels to represent transaction restart. */
struct GTY((for_user)) tm_restart_node {
- gimple stmt;
+ gimple *stmt;
tree label_or_list;
};
indirect call has been turned into a noreturn call. When this
happens, all the instructions after the call are no longer
reachable and must be deleted as dead. */
- vec<gimple, va_gc> *modified_noreturn_calls;
+ vec<gimple *, va_gc> *modified_noreturn_calls;
/* Array of all SSA_NAMEs used in the function. */
vec<tree, va_gc> *ssa_names;
/* Return the set of VUSE operand for statement G. */
static inline use_operand_p
-gimple_vuse_op (const_gimple g)
+gimple_vuse_op (const gimple *g)
{
struct use_optype_d *ops;
const gimple_statement_with_memory_ops *mem_ops_stmt =
/* Return the set of VDEF operand for statement G. */
static inline def_operand_p
-gimple_vdef_op (gimple g)
+gimple_vdef_op (gimple *g)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <gimple_statement_with_memory_ops *> (g);
/* Mark statement S as modified, and update it. */
static inline void
-update_stmt (gimple s)
+update_stmt (gimple *s)
{
if (gimple_has_ops (s))
{
/* Update statement S if it has been optimized. */
static inline void
-update_stmt_if_modified (gimple s)
+update_stmt_if_modified (gimple *s)
{
if (gimple_modified_p (s))
update_stmt_operands (cfun, s);
/* Mark statement S as modified, and update it. */
static inline void
-update_stmt_fn (struct function *fn, gimple s)
+update_stmt_fn (struct function *fn, gimple *s)
{
if (gimple_has_ops (s))
{
/* Read a statement with tag TAG in function FN from block IB using
descriptors in DATA_IN. */
-static gimple
+static gimple *
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
enum LTO_tags tag)
{
- gimple stmt;
+ gimple *stmt;
enum gimple_code code;
unsigned HOST_WIDE_INT num_ops;
size_t i;
tag = streamer_read_record_start (ib);
while (tag)
{
- gimple stmt = input_gimple_stmt (ib, data_in, tag);
+ gimple *stmt = input_gimple_stmt (ib, data_in, tag);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
/* After the statement, expect a 0 delimiter or the EH region
/* Emit statement STMT on the main stream of output block OB. */
static void
-output_gimple_stmt (struct output_block *ob, gimple stmt)
+output_gimple_stmt (struct output_block *ob, gimple *stmt)
{
unsigned i;
enum gimple_code code;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
int region;
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
output_gimple_stmt (ob, stmt);
Otherwise, all the statements are walked and NULL returned. */
-gimple
+gimple *
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
changed by the callbacks. */
-gimple
+gimple *
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
gimple_seq seq2 = seq;
- gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
+ gimple *ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
gcc_assert (seq2 == seq);
return ret;
}
NULL_TREE if no CALLBACK_OP is specified. */
tree
-walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
+walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
struct walk_stmt_info *wi)
{
hash_set<tree> *pset = (wi) ? wi->pset : NULL;
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
- gimple ret;
+ gimple *ret;
tree tree_ret;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (wi)
{
Returns the results of these callbacks or'ed. */
bool
-walk_stmt_load_store_addr_ops (gimple stmt, void *data,
+walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
walk_stmt_load_store_addr_fn visit_load,
walk_stmt_load_store_addr_fn visit_store,
walk_stmt_load_store_addr_fn visit_addr)
should make a faster clone for this case. */
bool
-walk_stmt_load_store_ops (gimple stmt, void *data,
+walk_stmt_load_store_ops (gimple *stmt, void *data,
walk_stmt_load_store_addr_fn visit_load,
walk_stmt_load_store_addr_fn visit_store)
{
typedef tree (*walk_stmt_fn) (gimple_stmt_iterator *, bool *,
struct walk_stmt_info *);
-extern gimple walk_gimple_seq_mod (gimple_seq *, walk_stmt_fn, walk_tree_fn,
+extern gimple *walk_gimple_seq_mod (gimple_seq *, walk_stmt_fn, walk_tree_fn,
struct walk_stmt_info *);
-extern gimple walk_gimple_seq (gimple_seq, walk_stmt_fn, walk_tree_fn,
+extern gimple *walk_gimple_seq (gimple_seq, walk_stmt_fn, walk_tree_fn,
struct walk_stmt_info *);
-extern tree walk_gimple_op (gimple, walk_tree_fn, struct walk_stmt_info *);
+extern tree walk_gimple_op (gimple *, walk_tree_fn, struct walk_stmt_info *);
extern tree walk_gimple_stmt (gimple_stmt_iterator *, walk_stmt_fn,
walk_tree_fn, struct walk_stmt_info *);
-typedef bool (*walk_stmt_load_store_addr_fn) (gimple, tree, tree, void *);
-extern bool walk_stmt_load_store_addr_ops (gimple, void *,
+typedef bool (*walk_stmt_load_store_addr_fn) (gimple *, tree, tree, void *);
+extern bool walk_stmt_load_store_addr_ops (gimple *, void *,
walk_stmt_load_store_addr_fn,
walk_stmt_load_store_addr_fn,
walk_stmt_load_store_addr_fn);
-extern bool walk_stmt_load_store_ops (gimple, void *,
+extern bool walk_stmt_load_store_ops (gimple *, void *,
walk_stmt_load_store_addr_fn,
walk_stmt_load_store_addr_fn);
#endif /* GCC_GIMPLE_WALK_H */
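
A complete walker now takes 'gimple *' in its callback, e.g. counting
loads (sketch):

static bool
count_load (gimple *, tree, tree, void *data)
{
  ++*(unsigned *) data;
  return false;		/* false = keep walking */
}

  /* ... at a use site ... */
  unsigned nloads = 0;
  walk_stmt_load_store_ops (stmt, &nloads, count_load, NULL);
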
/* Set the code for statement G to CODE. */
static inline void
-gimple_set_code (gimple g, enum gimple_code code)
+gimple_set_code (gimple *g, enum gimple_code code)
{
g->code = code;
}
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
operands. */
-gimple
+gimple *
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
size_t size;
- gimple stmt;
+ gimple *stmt;
size = gimple_size (code);
if (num_ops > 0)
/* Set SUBCODE to be the code of the expression computed by statement G. */
static inline void
-gimple_set_subcode (gimple g, unsigned subcode)
+gimple_set_subcode (gimple *g, unsigned subcode)
{
/* We only have 16 bits for the RHS code. Assert that we are not
overflowing it. */
#define gimple_build_with_ops(c, s, n) \
gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
-static gimple
+static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
unsigned num_ops MEM_STAT_DECL)
{
- gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
+ gimple *s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
gimple_set_subcode (s, subcode);
return s;
/* Build a GIMPLE_NOP statement. */
-gimple
+gimple *
gimple_build_nop (void)
{
return gimple_alloc (GIMPLE_NOP, 0);
CLEANUP is the cleanup expression. */
-gimple
+gimple *
gimple_build_wce (gimple_seq cleanup)
{
- gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
+ gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
if (cleanup)
gimple_wce_set_cleanup (p, cleanup);
VAR is bound to VALUE; block and location are taken from STMT. */
gdebug *
-gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
+gimple_build_debug_bind_stat (tree var, tree value, gimple *stmt MEM_STAT_DECL)
{
gdebug *p
= as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
gdebug *
gimple_build_debug_source_bind_stat (tree var, tree value,
- gimple stmt MEM_STAT_DECL)
+ gimple *stmt MEM_STAT_DECL)
{
gdebug *p
= as_a <gdebug *> (
BODY is the sequence of statements in the section. */
-gimple
+gimple *
gimple_build_omp_section (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
if (body)
gimple_omp_set_body (p, body);
BODY is the sequence of statements to be executed by just the master. */
-gimple
+gimple *
gimple_build_omp_master (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
if (body)
gimple_omp_set_body (p, body);
BODY is the sequence of statements to be executed by the taskgroup
construct. */
-gimple
+gimple *
gimple_build_omp_taskgroup (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
if (body)
gimple_omp_set_body (p, body);
BODY is the sequence of statements inside a loop that will be executed in
sequence. */
-gimple
+gimple *
gimple_build_omp_ordered (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
if (body)
gimple_omp_set_body (p, body);
/* Build a GIMPLE_OMP_RETURN statement.
WAIT_P is true if this is a non-waiting return. */
-gimple
+gimple *
gimple_build_omp_return (bool wait_p)
{
- gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
if (wait_p)
gimple_omp_return_set_nowait (p);
/* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
-gimple
+gimple *
gimple_build_omp_sections_switch (void)
{
return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
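
The builders that still return plain statements compose as before
(sketch; SOME_STMT is hypothetical):

  gimple_seq body = NULL;
  gimple_seq_add_stmt (&body, some_stmt);
  gimple *master = gimple_build_omp_master (body);
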
/* Complain of a gimple type mismatch and die. */
void
-gimple_check_failed (const_gimple gs, const char *file, int line,
+gimple_check_failed (const gimple *gs, const char *file, int line,
const char *function, enum gimple_code code,
enum tree_code subcode)
{
*SEQ_P is NULL, a new sequence is allocated. */
void
-gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
gimple_stmt_iterator si;
if (gs == NULL)
before the def/use vectors have been constructed. */
void
-gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
{
gimple_stmt_iterator si;
/* Determine whether to assign a location to the statement GS. */
static bool
-should_carry_location_p (gimple gs)
+should_carry_location_p (gimple *gs)
{
/* Don't emit a line note for a label. We particularly don't want to
emit one for the break label, since it doesn't actually correspond
/* Set the location for gimple statement GS to LOCATION. */
static void
-annotate_one_with_location (gimple gs, location_t location)
+annotate_one_with_location (gimple *gs, location_t location)
{
if (!gimple_has_location (gs)
&& !gimple_do_not_emit_location_p (gs)
for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
{
- gimple gs = gsi_stmt (i);
+ gimple *gs = gsi_stmt (i);
annotate_one_with_location (gs, location);
}
}
statement. */
static bool
-empty_stmt_p (gimple stmt)
+empty_stmt_p (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_NOP)
return true;
{
gimple_stmt_iterator gsi;
gimple_seq new_seq = NULL;
- gimple stmt;
+ gimple *stmt;
for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
{
/* Return true if calls C1 and C2 are known to go to the same function. */
bool
-gimple_call_same_target_p (const_gimple c1, const_gimple c2)
+gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
if (gimple_call_internal_p (c1))
return (gimple_call_internal_p (c2)
call_expr_flags, but for gimple tuples. */
int
-gimple_call_flags (const_gimple stmt)
+gimple_call_flags (const gimple *stmt)
{
int flags;
tree decl = gimple_call_fndecl (stmt);
/* Return true if GS is a copy assignment. */
bool
-gimple_assign_copy_p (gimple gs)
+gimple_assign_copy_p (gimple *gs)
{
return (gimple_assign_single_p (gs)
&& is_gimple_val (gimple_op (gs, 1)));
/* Return true if GS is a SSA_NAME copy assignment. */
bool
-gimple_assign_ssa_name_copy_p (gimple gs)
+gimple_assign_ssa_name_copy_p (gimple *gs)
{
return (gimple_assign_single_p (gs)
&& TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
treatment of unary NOPs is appropriate. */
bool
-gimple_assign_unary_nop_p (gimple gs)
+gimple_assign_unary_nop_p (gimple *gs)
{
return (is_gimple_assign (gs)
&& (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
/* Set BB to be the basic block holding G. */
void
-gimple_set_bb (gimple stmt, basic_block bb)
+gimple_set_bb (gimple *stmt, basic_block bb)
{
stmt->bb = bb;
tree op1, tree op2, tree op3)
{
unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* If the new CODE needs more operands, allocate a new statement. */
if (gimple_num_ops (stmt) < new_rhs_ops + 1)
{
tree lhs = gimple_assign_lhs (stmt);
- gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
+ gimple *new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
gimple_init_singleton (new_stmt);
gsi_replace (gsi, new_stmt, true);
statement other than an assignment or a call. */
tree
-gimple_get_lhs (const_gimple stmt)
+gimple_get_lhs (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
void
-gimple_set_lhs (gimple stmt, tree lhs)
+gimple_set_lhs (gimple *stmt, tree lhs)
{
enum gimple_code code = gimple_code (stmt);
and VUSE operand arrays are set to empty in the new copy. The new
copy isn't part of any sequence. */
-gimple
-gimple_copy (gimple stmt)
+gimple *
+gimple_copy (gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
unsigned num_ops = gimple_num_ops (stmt);
- gimple copy = gimple_alloc (code, num_ops);
+ gimple *copy = gimple_alloc (code, num_ops);
unsigned i;
/* Shallow copy all the fields from STMT. */
- Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
bool
-gimple_has_side_effects (const_gimple s)
+gimple_has_side_effects (const gimple *s)
{
if (is_gimple_debug (s))
return false;
S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
bool
-gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
+gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
tree t, div = NULL_TREE;
enum tree_code op;
/* Return true if statement S can trap. */
bool
-gimple_could_trap_p (gimple s)
+gimple_could_trap_p (gimple *s)
{
return gimple_could_trap_p_1 (s, true, true);
}
/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
bool
-gimple_assign_rhs_could_trap_p (gimple s)
+gimple_assign_rhs_could_trap_p (gimple *s)
{
gcc_assert (is_gimple_assign (s));
return gimple_could_trap_p_1 (s, true, false);
/* Helper for gimple_ior_addresses_taken_1. */
static bool
-gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
+gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
{
bitmap addresses_taken = (bitmap)data;
addr = get_base_address (addr);
were any in this stmt. */
bool
-gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
+gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
{
return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
gimple_ior_addresses_taken_1);
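
Accumulating over a whole block then looks like (sketch):

  bitmap taken = BITMAP_ALLOC (NULL);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    gimple_ior_addresses_taken (taken, gsi_stmt (gsi));
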
a decl of a builtin function. */
bool
-gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
+gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
/* Return true when STMT is a builtin call. */
bool
-gimple_call_builtin_p (const_gimple stmt)
+gimple_call_builtin_p (const gimple *stmt)
{
tree fndecl;
if (is_gimple_call (stmt)
/* Return true when STMT is a builtin call to CLASS. */
bool
-gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
+gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
{
tree fndecl;
if (is_gimple_call (stmt)
/* Return true when STMT is a builtin call to CODE of CLASS. */
bool
-gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
+gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
{
tree fndecl;
if (is_gimple_call (stmt)
/* Return true when CALL is a call stmt that definitely doesn't
free any memory or make it unavailable otherwise. */
bool
-nonfreeing_call_p (gimple call)
+nonfreeing_call_p (gimple *call)
{
if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
&& gimple_call_flags (call) & ECF_LEAF)
This routine only makes a superficial check for a dereference. Thus
it must only be used if it is safe to return a false negative. */
static bool
-check_loadstore (gimple, tree op, tree, void *data)
+check_loadstore (gimple *, tree op, tree, void *data)
{
if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
&& operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
/* Return true if OP can be inferred to be non-NULL after STMT executes,
either by using a pointer dereference or attributes. */
bool
-infer_nonnull_range (gimple stmt, tree op)
+infer_nonnull_range (gimple *stmt, tree op)
{
return infer_nonnull_range_by_dereference (stmt, op)
|| infer_nonnull_range_by_attribute (stmt, op);
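
A caller-side sketch:

  if (TREE_CODE (op) == SSA_NAME && infer_nonnull_range (stmt, op))
    {
      /* OP is known non-NULL once STMT has executed.  */
    }
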
/* Return true if OP can be inferred to be non-NULL after STMT
executes by using a pointer dereference. */
bool
-infer_nonnull_range_by_dereference (gimple stmt, tree op)
+infer_nonnull_range_by_dereference (gimple *stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
/* Return true if OP can be inferred to be non-NULL after STMT
executes by using attributes. */
bool
-infer_nonnull_range_by_attribute (gimple stmt, tree op)
+infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gsi_remove (&gsi, true);
release_defs (stmt);
ggc_free (stmt);
to __builtin_unreachable or __cxa_pure_virtual. */
void
-maybe_remove_unused_call_args (struct function *fn, gimple stmt)
+maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
tree decl = gimple_call_fndecl (stmt);
if (TYPE_ARG_TYPES (TREE_TYPE (decl))
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
-typedef gimple gimple_seq_node;
+typedef gimple *gimple_seq_node;
enum gimple_code {
#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
/* Error out if a gimple tuple is addressed incorrectly. */
#if defined ENABLE_GIMPLE_CHECKING
#define gcc_gimple_checking_assert(EXPR) gcc_assert (EXPR)
-extern void gimple_check_failed (const_gimple, const char *, int, \
+extern void gimple_check_failed (const gimple *, const char *, int, \
const char *, enum gimple_code, \
enum tree_code) ATTRIBUTE_NORETURN;
#define GIMPLE_CHECK(GS, CODE) \
do { \
- const_gimple __gs = (GS); \
+ const gimple *__gs = (GS); \
if (gimple_code (__gs) != (CODE)) \
gimple_check_failed (__gs, __FILE__, __LINE__, __FUNCTION__, \
(CODE), ERROR_MARK); \
} while (0)
template <typename T>
static inline T
-GIMPLE_CHECK2(const_gimple gs,
+GIMPLE_CHECK2(const gimple *gs,
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
const char *file = __builtin_FILE (),
int line = __builtin_LINE (),
}
template <typename T>
static inline T
-GIMPLE_CHECK2(gimple gs,
+GIMPLE_CHECK2(gimple *gs,
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
const char *file = __builtin_FILE (),
int line = __builtin_LINE (),
#define GIMPLE_CHECK(GS, CODE) (void)0
template <typename T>
static inline T
-GIMPLE_CHECK2(gimple gs)
+GIMPLE_CHECK2(gimple *gs)
{
return as_a <T> (gs);
}
template <typename T>
static inline T
-GIMPLE_CHECK2(const_gimple gs)
+GIMPLE_CHECK2(const gimple *gs)
{
return as_a <T> (gs);
}
struct GTY((desc ("gimple_statement_structure (&%h)"), tag ("GSS_BASE"),
chain_next ("%h.next"), variable_size))
- gimple_statement_base
+ gimple
{
/* [ WORD 1 ]
Main identifying code for a tuple. */
A gimple statement is hence also a double-ended list of
statements, with the pointer itself being the first element,
and the prev pointer being the last. */
- gimple next;
- gimple GTY((skip)) prev;
+ gimple *next;
+ gimple *GTY((skip)) prev;
};
/* This gimple subclass has no tag value. */
struct GTY(())
- gimple_statement_with_ops_base : public gimple_statement_base
+ gimple_statement_with_ops_base : public gimple
{
/* [ WORD 1-6 ] : base class */
/* OMP statements. */
struct GTY((tag("GSS_OMP")))
- gimple_statement_omp : public gimple_statement_base
+ gimple_statement_omp : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_BIND */
struct GTY((tag("GSS_BIND")))
- gbind : public gimple_statement_base
+ gbind : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_CATCH */
struct GTY((tag("GSS_CATCH")))
- gcatch : public gimple_statement_base
+ gcatch : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_EH_FILTER */
struct GTY((tag("GSS_EH_FILTER")))
- geh_filter : public gimple_statement_base
+ geh_filter : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_EH_ELSE */
struct GTY((tag("GSS_EH_ELSE")))
- geh_else : public gimple_statement_base
+ geh_else : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_EH_MUST_NOT_THROW */
struct GTY((tag("GSS_EH_MNT")))
- geh_mnt : public gimple_statement_base
+ geh_mnt : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_PHI */
struct GTY((tag("GSS_PHI")))
- gphi : public gimple_statement_base
+ gphi : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_RESX, GIMPLE_EH_DISPATCH */
struct GTY((tag("GSS_EH_CTRL")))
- gimple_statement_eh_ctrl : public gimple_statement_base
+ gimple_statement_eh_ctrl : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_TRY */
struct GTY((tag("GSS_TRY")))
- gtry : public gimple_statement_base
+ gtry : public gimple
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_WITH_CLEANUP_EXPR */
struct GTY((tag("GSS_WCE")))
- gimple_statement_wce : public gimple_statement_base
+ gimple_statement_wce : public gimple
{
/* [ WORD 1-6 ] : base class */
do not need the body field. */
struct GTY((tag("GSS_OMP_CONTINUE")))
- gomp_continue : public gimple_statement_base
+ gomp_continue : public gimple
{
/* [ WORD 1-6 ] : base class */
contains a sequence, which we don't need here. */
struct GTY((tag("GSS_OMP_ATOMIC_LOAD")))
- gomp_atomic_load : public gimple_statement_base
+ gomp_atomic_load : public gimple
{
/* [ WORD 1-6 ] : base class */
See note on GIMPLE_OMP_ATOMIC_LOAD. */
struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
- gimple_statement_omp_atomic_store_layout : public gimple_statement_base
+ gimple_statement_omp_atomic_store_layout : public gimple
{
/* [ WORD 1-6 ] : base class */
template <>
template <>
inline bool
-is_a_helper <gasm *>::test (gimple gs)
+is_a_helper <gasm *>::test (gimple *gs)
{
return gs->code == GIMPLE_ASM;
}
template <>
template <>
inline bool
-is_a_helper <gassign *>::test (gimple gs)
+is_a_helper <gassign *>::test (gimple *gs)
{
return gs->code == GIMPLE_ASSIGN;
}
template <>
template <>
inline bool
-is_a_helper <const gassign *>::test (const_gimple gs)
+is_a_helper <const gassign *>::test (const gimple *gs)
{
return gs->code == GIMPLE_ASSIGN;
}
template <>
template <>
inline bool
-is_a_helper <gbind *>::test (gimple gs)
+is_a_helper <gbind *>::test (gimple *gs)
{
return gs->code == GIMPLE_BIND;
}
template <>
template <>
inline bool
-is_a_helper <gcall *>::test (gimple gs)
+is_a_helper <gcall *>::test (gimple *gs)
{
return gs->code == GIMPLE_CALL;
}
template <>
template <>
inline bool
-is_a_helper <gcatch *>::test (gimple gs)
+is_a_helper <gcatch *>::test (gimple *gs)
{
return gs->code == GIMPLE_CATCH;
}
template <>
template <>
inline bool
-is_a_helper <gcond *>::test (gimple gs)
+is_a_helper <gcond *>::test (gimple *gs)
{
return gs->code == GIMPLE_COND;
}
template <>
template <>
inline bool
-is_a_helper <const gcond *>::test (const_gimple gs)
+is_a_helper <const gcond *>::test (const gimple *gs)
{
return gs->code == GIMPLE_COND;
}
template <>
template <>
inline bool
-is_a_helper <gdebug *>::test (gimple gs)
+is_a_helper <gdebug *>::test (gimple *gs)
{
return gs->code == GIMPLE_DEBUG;
}
template <>
template <>
inline bool
-is_a_helper <ggoto *>::test (gimple gs)
+is_a_helper <ggoto *>::test (gimple *gs)
{
return gs->code == GIMPLE_GOTO;
}
template <>
template <>
inline bool
-is_a_helper <glabel *>::test (gimple gs)
+is_a_helper <glabel *>::test (gimple *gs)
{
return gs->code == GIMPLE_LABEL;
}
template <>
template <>
inline bool
-is_a_helper <gresx *>::test (gimple gs)
+is_a_helper <gresx *>::test (gimple *gs)
{
return gs->code == GIMPLE_RESX;
}
template <>
template <>
inline bool
-is_a_helper <geh_dispatch *>::test (gimple gs)
+is_a_helper <geh_dispatch *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_DISPATCH;
}
template <>
template <>
inline bool
-is_a_helper <geh_else *>::test (gimple gs)
+is_a_helper <geh_else *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_ELSE;
}
template <>
template <>
inline bool
-is_a_helper <geh_filter *>::test (gimple gs)
+is_a_helper <geh_filter *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_FILTER;
}
template <>
template <>
inline bool
-is_a_helper <geh_mnt *>::test (gimple gs)
+is_a_helper <geh_mnt *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_MUST_NOT_THROW;
}
template <>
template <>
inline bool
-is_a_helper <gomp_atomic_load *>::test (gimple gs)
+is_a_helper <gomp_atomic_load *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
}
template <>
template <>
inline bool
-is_a_helper <gomp_atomic_store *>::test (gimple gs)
+is_a_helper <gomp_atomic_store *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_STORE;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_return *>::test (gimple gs)
+is_a_helper <gimple_statement_omp_return *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_RETURN;
}
template <>
template <>
inline bool
-is_a_helper <gomp_continue *>::test (gimple gs)
+is_a_helper <gomp_continue *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_CONTINUE;
}
template <>
template <>
inline bool
-is_a_helper <gomp_critical *>::test (gimple gs)
+is_a_helper <gomp_critical *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_CRITICAL;
}
template <>
template <>
inline bool
-is_a_helper <gomp_for *>::test (gimple gs)
+is_a_helper <gomp_for *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_FOR;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_taskreg *>::test (gimple gs)
+is_a_helper <gimple_statement_omp_taskreg *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL || gs->code == GIMPLE_OMP_TASK;
}
template <>
template <>
inline bool
-is_a_helper <gomp_parallel *>::test (gimple gs)
+is_a_helper <gomp_parallel *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL;
}
template <>
template <>
inline bool
-is_a_helper <gomp_target *>::test (gimple gs)
+is_a_helper <gomp_target *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_TARGET;
}
template <>
template <>
inline bool
-is_a_helper <gomp_sections *>::test (gimple gs)
+is_a_helper <gomp_sections *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_SECTIONS;
}
template <>
template <>
inline bool
-is_a_helper <gomp_single *>::test (gimple gs)
+is_a_helper <gomp_single *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_SINGLE;
}
template <>
template <>
inline bool
-is_a_helper <gomp_teams *>::test (gimple gs)
+is_a_helper <gomp_teams *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_TEAMS;
}
template <>
template <>
inline bool
-is_a_helper <gomp_task *>::test (gimple gs)
+is_a_helper <gomp_task *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_TASK;
}
template <>
template <>
inline bool
-is_a_helper <gphi *>::test (gimple gs)
+is_a_helper <gphi *>::test (gimple *gs)
{
return gs->code == GIMPLE_PHI;
}
template <>
template <>
inline bool
-is_a_helper <greturn *>::test (gimple gs)
+is_a_helper <greturn *>::test (gimple *gs)
{
return gs->code == GIMPLE_RETURN;
}
template <>
template <>
inline bool
-is_a_helper <gswitch *>::test (gimple gs)
+is_a_helper <gswitch *>::test (gimple *gs)
{
return gs->code == GIMPLE_SWITCH;
}
template <>
template <>
inline bool
-is_a_helper <gtransaction *>::test (gimple gs)
+is_a_helper <gtransaction *>::test (gimple *gs)
{
return gs->code == GIMPLE_TRANSACTION;
}
template <>
template <>
inline bool
-is_a_helper <gtry *>::test (gimple gs)
+is_a_helper <gtry *>::test (gimple *gs)
{
return gs->code == GIMPLE_TRY;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_wce *>::test (gimple gs)
+is_a_helper <gimple_statement_wce *>::test (gimple *gs)
{
return gs->code == GIMPLE_WITH_CLEANUP_EXPR;
}
template <>
template <>
inline bool
-is_a_helper <const gasm *>::test (const_gimple gs)
+is_a_helper <const gasm *>::test (const gimple *gs)
{
return gs->code == GIMPLE_ASM;
}
template <>
template <>
inline bool
-is_a_helper <const gbind *>::test (const_gimple gs)
+is_a_helper <const gbind *>::test (const gimple *gs)
{
return gs->code == GIMPLE_BIND;
}
template <>
template <>
inline bool
-is_a_helper <const gcall *>::test (const_gimple gs)
+is_a_helper <const gcall *>::test (const gimple *gs)
{
return gs->code == GIMPLE_CALL;
}
template <>
template <>
inline bool
-is_a_helper <const gcatch *>::test (const_gimple gs)
+is_a_helper <const gcatch *>::test (const gimple *gs)
{
return gs->code == GIMPLE_CATCH;
}
template <>
template <>
inline bool
-is_a_helper <const gresx *>::test (const_gimple gs)
+is_a_helper <const gresx *>::test (const gimple *gs)
{
return gs->code == GIMPLE_RESX;
}
template <>
template <>
inline bool
-is_a_helper <const geh_dispatch *>::test (const_gimple gs)
+is_a_helper <const geh_dispatch *>::test (const gimple *gs)
{
return gs->code == GIMPLE_EH_DISPATCH;
}
template <>
template <>
inline bool
-is_a_helper <const geh_filter *>::test (const_gimple gs)
+is_a_helper <const geh_filter *>::test (const gimple *gs)
{
return gs->code == GIMPLE_EH_FILTER;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_atomic_load *>::test (const_gimple gs)
+is_a_helper <const gomp_atomic_load *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_atomic_store *>::test (const_gimple gs)
+is_a_helper <const gomp_atomic_store *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_STORE;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_return *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_omp_return *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_RETURN;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_continue *>::test (const_gimple gs)
+is_a_helper <const gomp_continue *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_CONTINUE;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_critical *>::test (const_gimple gs)
+is_a_helper <const gomp_critical *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_CRITICAL;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_for *>::test (const_gimple gs)
+is_a_helper <const gomp_for *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_FOR;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_taskreg *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_omp_taskreg *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL || gs->code == GIMPLE_OMP_TASK;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_parallel *>::test (const_gimple gs)
+is_a_helper <const gomp_parallel *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_target *>::test (const_gimple gs)
+is_a_helper <const gomp_target *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_TARGET;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_sections *>::test (const_gimple gs)
+is_a_helper <const gomp_sections *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_SECTIONS;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_single *>::test (const_gimple gs)
+is_a_helper <const gomp_single *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_SINGLE;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_teams *>::test (const_gimple gs)
+is_a_helper <const gomp_teams *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_TEAMS;
}
template <>
template <>
inline bool
-is_a_helper <const gomp_task *>::test (const_gimple gs)
+is_a_helper <const gomp_task *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_TASK;
}
template <>
template <>
inline bool
-is_a_helper <const gphi *>::test (const_gimple gs)
+is_a_helper <const gphi *>::test (const gimple *gs)
{
return gs->code == GIMPLE_PHI;
}
template <>
template <>
inline bool
-is_a_helper <const gtransaction *>::test (const_gimple gs)
+is_a_helper <const gtransaction *>::test (const gimple *gs)
{
return gs->code == GIMPLE_TRANSACTION;
}
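
With the helpers above, statement kinds are tested and narrowed through
the generic is-a machinery (sketch):

  if (gcall *call = dyn_cast <gcall *> (stmt))
    {
      tree callee = gimple_call_fndecl (call);
      /* ... */
    }
  else if (is_a <gphi *> (stmt))
    {
      /* ... */
    }
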
/* This variable holds the currently expanded gimple statement for purposes
of communicating the profile info to the builtin expanders. */
-extern gimple currently_expanding_gimple_stmt;
+extern gimple *currently_expanding_gimple_stmt;
#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
-gimple gimple_alloc_stat (enum gimple_code, unsigned MEM_STAT_DECL);
+gimple *gimple_alloc_stat (enum gimple_code, unsigned MEM_STAT_DECL);
greturn *gimple_build_return (tree);
void gimple_call_reset_alias_info (gcall *);
gcall *gimple_build_call_vec (tree, vec<tree> );
void gimple_cond_set_condition_from_tree (gcond *, tree);
glabel *gimple_build_label (tree label);
ggoto *gimple_build_goto (tree dest);
-gimple gimple_build_nop (void);
+gimple *gimple_build_nop (void);
gbind *gimple_build_bind (tree, gimple_seq, tree);
gasm *gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
vec<tree, va_gc> *, vec<tree, va_gc> *,
geh_else *gimple_build_eh_else (gimple_seq, gimple_seq);
gtry *gimple_build_try (gimple_seq, gimple_seq,
enum gimple_try_flags);
-gimple gimple_build_wce (gimple_seq);
+gimple *gimple_build_wce (gimple_seq);
gresx *gimple_build_resx (int);
gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
gswitch *gimple_build_switch (tree, tree, vec<tree> );
geh_dispatch *gimple_build_eh_dispatch (int);
-gdebug *gimple_build_debug_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+gdebug *gimple_build_debug_bind_stat (tree, tree, gimple * MEM_STAT_DECL);
#define gimple_build_debug_bind(var,val,stmt) \
gimple_build_debug_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
-gdebug *gimple_build_debug_source_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+gdebug *gimple_build_debug_source_bind_stat (tree, tree, gimple * MEM_STAT_DECL);
#define gimple_build_debug_source_bind(var,val,stmt) \
gimple_build_debug_source_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
gomp_critical *gimple_build_omp_critical (gimple_seq, tree);
gomp_parallel *gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
gomp_task *gimple_build_omp_task (gimple_seq, tree, tree, tree, tree,
tree, tree);
-gimple gimple_build_omp_section (gimple_seq);
-gimple gimple_build_omp_master (gimple_seq);
-gimple gimple_build_omp_taskgroup (gimple_seq);
+gimple *gimple_build_omp_section (gimple_seq);
+gimple *gimple_build_omp_master (gimple_seq);
+gimple *gimple_build_omp_taskgroup (gimple_seq);
gomp_continue *gimple_build_omp_continue (tree, tree);
-gimple gimple_build_omp_ordered (gimple_seq);
-gimple gimple_build_omp_return (bool);
+gimple *gimple_build_omp_ordered (gimple_seq);
+gimple *gimple_build_omp_return (bool);
gomp_sections *gimple_build_omp_sections (gimple_seq, tree);
-gimple gimple_build_omp_sections_switch (void);
+gimple *gimple_build_omp_sections_switch (void);
gomp_single *gimple_build_omp_single (gimple_seq, tree);
gomp_target *gimple_build_omp_target (gimple_seq, int, tree);
gomp_teams *gimple_build_omp_teams (gimple_seq, tree);
gomp_atomic_load *gimple_build_omp_atomic_load (tree, tree);
gomp_atomic_store *gimple_build_omp_atomic_store (tree);
gtransaction *gimple_build_transaction (gimple_seq, tree);
-extern void gimple_seq_add_stmt (gimple_seq *, gimple);
-extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple);
+extern void gimple_seq_add_stmt (gimple_seq *, gimple *);
+extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple *);
void gimple_seq_add_seq (gimple_seq *, gimple_seq);
void gimple_seq_add_seq_without_update (gimple_seq *, gimple_seq);
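With these declarations, the plain builders hand back gimple * while kind-specific builders return the subclass pointer directly, and both feed gimple_seq_add_stmt unchanged. A hedged usage sketch; LAB and DEST are hypothetical LABEL_DECLs, not names from this patch:

/* Sketch: build a tiny sequence from the builders declared above.  */
static gimple_seq
build_jump_seq (tree lab, tree dest)
{
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, gimple_build_label (lab));
  gimple_seq_add_stmt (&seq, gimple_build_nop ());
  /* ggoto * converts implicitly to gimple * at the call.  */
  gimple_seq_add_stmt (&seq, gimple_build_goto (dest));
  return seq;
}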
extern void annotate_all_with_location_after (gimple_seq, gimple_stmt_iterator,
extern void annotate_all_with_location (gimple_seq, location_t);
bool empty_body_p (gimple_seq);
gimple_seq gimple_seq_copy (gimple_seq);
-bool gimple_call_same_target_p (const_gimple, const_gimple);
-int gimple_call_flags (const_gimple);
+bool gimple_call_same_target_p (const gimple *, const gimple *);
+int gimple_call_flags (const gimple *);
int gimple_call_arg_flags (const gcall *, unsigned);
int gimple_call_return_flags (const gcall *);
-bool gimple_assign_copy_p (gimple);
-bool gimple_assign_ssa_name_copy_p (gimple);
-bool gimple_assign_unary_nop_p (gimple);
-void gimple_set_bb (gimple, basic_block);
+bool gimple_assign_copy_p (gimple *);
+bool gimple_assign_ssa_name_copy_p (gimple *);
+bool gimple_assign_unary_nop_p (gimple *);
+void gimple_set_bb (gimple *, basic_block);
void gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *, tree);
void gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *, enum tree_code,
tree, tree, tree);
-tree gimple_get_lhs (const_gimple);
-void gimple_set_lhs (gimple, tree);
-gimple gimple_copy (gimple);
-bool gimple_has_side_effects (const_gimple);
-bool gimple_could_trap_p_1 (gimple, bool, bool);
-bool gimple_could_trap_p (gimple);
-bool gimple_assign_rhs_could_trap_p (gimple);
+tree gimple_get_lhs (const gimple *);
+void gimple_set_lhs (gimple *, tree);
+gimple *gimple_copy (gimple *);
+bool gimple_has_side_effects (const gimple *);
+bool gimple_could_trap_p_1 (gimple *, bool, bool);
+bool gimple_could_trap_p (gimple *);
+bool gimple_assign_rhs_could_trap_p (gimple *);
extern void dump_gimple_statistics (void);
unsigned get_gimple_rhs_num_ops (enum tree_code);
extern tree canonicalize_cond_expr_cond (tree);
extern tree gimple_unsigned_type (tree);
extern tree gimple_signed_type (tree);
extern alias_set_type gimple_get_alias_set (tree);
-extern bool gimple_ior_addresses_taken (bitmap, gimple);
-extern bool gimple_builtin_call_types_compatible_p (const_gimple, tree);
-extern bool gimple_call_builtin_p (const_gimple);
-extern bool gimple_call_builtin_p (const_gimple, enum built_in_class);
-extern bool gimple_call_builtin_p (const_gimple, enum built_in_function);
+extern bool gimple_ior_addresses_taken (bitmap, gimple *);
+extern bool gimple_builtin_call_types_compatible_p (const gimple *, tree);
+extern bool gimple_call_builtin_p (const gimple *);
+extern bool gimple_call_builtin_p (const gimple *, enum built_in_class);
+extern bool gimple_call_builtin_p (const gimple *, enum built_in_function);
extern bool gimple_asm_clobbers_memory_p (const gasm *);
extern void dump_decl_set (FILE *, bitmap);
-extern bool nonfreeing_call_p (gimple);
-extern bool infer_nonnull_range (gimple, tree);
-extern bool infer_nonnull_range_by_dereference (gimple, tree);
-extern bool infer_nonnull_range_by_attribute (gimple, tree);
+extern bool nonfreeing_call_p (gimple *);
+extern bool infer_nonnull_range (gimple *, tree);
+extern bool infer_nonnull_range_by_dereference (gimple *, tree);
+extern bool infer_nonnull_range_by_attribute (gimple *, tree);
extern void sort_case_labels (vec<tree>);
extern void preprocess_case_label_vec_for_gimple (vec<tree>, tree, tree *);
extern void gimple_seq_set_location (gimple_seq, location_t);
extern void gimple_seq_discard (gimple_seq);
-extern void maybe_remove_unused_call_args (struct function *, gimple);
+extern void maybe_remove_unused_call_args (struct function *, gimple *);
/* Formal (expression) temporary table handling: multiple occurrences of
the same scalar expression are evaluated into the same temporary. */
/* Return the first statement in GIMPLE sequence S. */
-static inline gimple
+static inline gimple *
gimple_seq_first_stmt (gimple_seq s)
{
gimple_seq_node n = gimple_seq_first (s);
/* Return the last statement in GIMPLE sequence S. */
-static inline gimple
+static inline gimple *
gimple_seq_last_stmt (gimple_seq s)
{
gimple_seq_node n = gimple_seq_last (s);
/* Allocate a new sequence and initialize its first element with STMT. */
static inline gimple_seq
-gimple_seq_alloc_with_stmt (gimple stmt)
+gimple_seq_alloc_with_stmt (gimple *stmt)
{
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, stmt);
/* Return the code for GIMPLE statement G. */
static inline enum gimple_code
-gimple_code (const_gimple g)
+gimple_code (const gimple *g)
{
return g->code;
}
/* Return which GSS code is used by GS. */
static inline enum gimple_statement_structure_enum
-gimple_statement_structure (gimple gs)
+gimple_statement_structure (gimple *gs)
{
return gss_for_code (gimple_code (gs));
}
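gimple_statement_structure is a thin mapping from the statement code to its GSS layout; a small illustrative sketch of reading both (debug-dump helper, not from the patch):

/* Sketch: print which GSS layout backs STMT.  */
static void
dump_stmt_layout (gimple *stmt, FILE *out)
{
  enum gimple_code code = gimple_code (stmt);
  enum gimple_statement_structure_enum gss = gimple_statement_structure (stmt);
  fprintf (out, "code %d uses layout %d\n", (int) code, (int) gss);
}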
High GIMPLE statements. */
static inline bool
-gimple_has_substatements (gimple g)
+gimple_has_substatements (gimple *g)
{
switch (gimple_code (g))
{
/* Return the basic block holding statement G. */
static inline basic_block
-gimple_bb (const_gimple g)
+gimple_bb (const gimple *g)
{
return g->bb;
}
/* Return the lexical scope block holding statement G. */
static inline tree
-gimple_block (const_gimple g)
+gimple_block (const gimple *g)
{
return LOCATION_BLOCK (g->location);
}
/* Set BLOCK to be the lexical scope block holding statement G. */
static inline void
-gimple_set_block (gimple g, tree block)
+gimple_set_block (gimple *g, tree block)
{
if (block)
g->location =
/* Return location information for statement G. */
static inline location_t
-gimple_location (const_gimple g)
+gimple_location (const gimple *g)
{
return g->location;
}
Otherwise, UNKNOWN_LOCATION is returned. */
static inline location_t
-gimple_location_safe (const_gimple g)
+gimple_location_safe (const gimple *g)
{
return g ? gimple_location (g) : UNKNOWN_LOCATION;
}
/* Return pointer to location information for statement G. */
static inline const location_t *
-gimple_location_ptr (const_gimple g)
+gimple_location_ptr (const gimple *g)
{
return &g->location;
}
/* Set location information for statement G. */
static inline void
-gimple_set_location (gimple g, location_t location)
+gimple_set_location (gimple *g, location_t location)
{
g->location = location;
}
/* Return true if G contains location information. */
static inline bool
-gimple_has_location (const_gimple g)
+gimple_has_location (const gimple *g)
{
return LOCATION_LOCUS (gimple_location (g)) != UNKNOWN_LOCATION;
}
/* Return the file name of the location of STMT. */
static inline const char *
-gimple_filename (const_gimple stmt)
+gimple_filename (const gimple *stmt)
{
return LOCATION_FILE (gimple_location (stmt));
}
/* Return the line number of the location of STMT. */
static inline int
-gimple_lineno (const_gimple stmt)
+gimple_lineno (const gimple *stmt)
{
return LOCATION_LINE (gimple_location (stmt));
}
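The location accessors compose naturally with the diagnostic machinery; a sketch of a typical guard-then-warn helper, assuming warning_at from GCC's diagnostic core (the wrapper name itself is made up), with gimple_no_warning_p declared just below:

/* Sketch: emit a warning at STMT's location unless suppressed.  */
static void
maybe_warn_stmt (gimple *stmt, const char *msg)
{
  if (gimple_no_warning_p (stmt))
    return;
  if (gimple_has_location (stmt))
    warning_at (gimple_location (stmt), 0, "%s", msg);
}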
/* Return true if no warnings should be emitted for statement STMT. */
static inline bool
-gimple_no_warning_p (const_gimple stmt)
+gimple_no_warning_p (const gimple *stmt)
{
return stmt->no_warning;
}
/* Set the no_warning flag of STMT to NO_WARNING. */
static inline void
-gimple_set_no_warning (gimple stmt, bool no_warning)
+gimple_set_no_warning (gimple *stmt, bool no_warning)
{
stmt->no_warning = (unsigned) no_warning;
}
*/
static inline void
-gimple_set_visited (gimple stmt, bool visited_p)
+gimple_set_visited (gimple *stmt, bool visited_p)
{
stmt->visited = (unsigned) visited_p;
}
struct gimple_statement_base. */
static inline bool
-gimple_visited_p (gimple stmt)
+gimple_visited_p (gimple *stmt)
{
return stmt->visited;
}
the 'plf' data member of struct gimple_statement_structure. */
static inline void
-gimple_set_plf (gimple stmt, enum plf_mask plf, bool val_p)
+gimple_set_plf (gimple *stmt, enum plf_mask plf, bool val_p)
{
if (val_p)
stmt->plf |= (unsigned int) plf;
the 'plf' data member of struct gimple_statement_structure. */
static inline unsigned int
-gimple_plf (gimple stmt, enum plf_mask plf)
+gimple_plf (gimple *stmt, enum plf_mask plf)
{
return stmt->plf & ((unsigned int) plf);
}
to any value it sees fit. */
static inline void
-gimple_set_uid (gimple g, unsigned uid)
+gimple_set_uid (gimple *g, unsigned uid)
{
g->uid = uid;
}
to any value it sees fit. */
static inline unsigned
-gimple_uid (const_gimple g)
+gimple_uid (const gimple *g)
{
return g->uid;
}
/* Make statement G a singleton sequence. */
static inline void
-gimple_init_singleton (gimple g)
+gimple_init_singleton (gimple *g)
{
g->next = NULL;
g->prev = g;
/* Return true if GIMPLE statement G has register or memory operands. */
static inline bool
-gimple_has_ops (const_gimple g)
+gimple_has_ops (const gimple *g)
{
return gimple_code (g) >= GIMPLE_COND && gimple_code (g) <= GIMPLE_RETURN;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_with_ops *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_with_ops *>::test (const gimple *gs)
{
return gimple_has_ops (gs);
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_with_ops *>::test (gimple gs)
+is_a_helper <gimple_statement_with_ops *>::test (gimple *gs)
{
return gimple_has_ops (gs);
}
/* Return true if GIMPLE statement G has memory operands. */
static inline bool
-gimple_has_mem_ops (const_gimple g)
+gimple_has_mem_ops (const gimple *g)
{
return gimple_code (g) >= GIMPLE_ASSIGN && gimple_code (g) <= GIMPLE_RETURN;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_with_memory_ops *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_with_memory_ops *>::test (const gimple *gs)
{
return gimple_has_mem_ops (gs);
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_with_memory_ops *>::test (gimple gs)
+is_a_helper <gimple_statement_with_memory_ops *>::test (gimple *gs)
{
return gimple_has_mem_ops (gs);
}
/* Return the set of USE operands for statement G. */
static inline struct use_optype_d *
-gimple_use_ops (const_gimple g)
+gimple_use_ops (const gimple *g)
{
const gimple_statement_with_ops *ops_stmt =
dyn_cast <const gimple_statement_with_ops *> (g);
/* Set USE to be the set of USE operands for statement G. */
static inline void
-gimple_set_use_ops (gimple g, struct use_optype_d *use)
+gimple_set_use_ops (gimple *g, struct use_optype_d *use)
{
gimple_statement_with_ops *ops_stmt =
as_a <gimple_statement_with_ops *> (g);
/* Return the single VUSE operand of the statement G. */
static inline tree
-gimple_vuse (const_gimple g)
+gimple_vuse (const gimple *g)
{
const gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <const gimple_statement_with_memory_ops *> (g);
/* Return the single VDEF operand of the statement G. */
static inline tree
-gimple_vdef (const_gimple g)
+gimple_vdef (const gimple *g)
{
const gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <const gimple_statement_with_memory_ops *> (g);
/* Return the single VUSE operand of the statement G. */
static inline tree *
-gimple_vuse_ptr (gimple g)
+gimple_vuse_ptr (gimple *g)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <gimple_statement_with_memory_ops *> (g);
/* Return the single VDEF operand of the statement G. */
static inline tree *
-gimple_vdef_ptr (gimple g)
+gimple_vdef_ptr (gimple *g)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <gimple_statement_with_memory_ops *> (g);
/* Set the single VUSE operand of the statement G. */
static inline void
-gimple_set_vuse (gimple g, tree vuse)
+gimple_set_vuse (gimple *g, tree vuse)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
as_a <gimple_statement_with_memory_ops *> (g);
/* Set the single VDEF operand of the statement G. */
static inline void
-gimple_set_vdef (gimple g, tree vdef)
+gimple_set_vdef (gimple *g, tree vdef)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
as_a <gimple_statement_with_memory_ops *> (g);
been set. */
static inline bool
-gimple_modified_p (const_gimple g)
+gimple_modified_p (const gimple *g)
{
return (gimple_has_ops (g)) ? (bool) g->modified : false;
}
a MODIFIED field. */
static inline void
-gimple_set_modified (gimple s, bool modifiedp)
+gimple_set_modified (gimple *s, bool modifiedp)
{
if (gimple_has_ops (s))
s->modified = (unsigned) modifiedp;
three kinds of computation that GIMPLE supports. */
static inline enum tree_code
-gimple_expr_code (const_gimple stmt)
+gimple_expr_code (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
/* Return true if statement STMT contains volatile operands. */
static inline bool
-gimple_has_volatile_ops (const_gimple stmt)
+gimple_has_volatile_ops (const gimple *stmt)
{
if (gimple_has_mem_ops (stmt))
return stmt->has_volatile_ops;
/* Set the HAS_VOLATILE_OPS flag to VOLATILEP. */
static inline void
-gimple_set_has_volatile_ops (gimple stmt, bool volatilep)
+gimple_set_has_volatile_ops (gimple *stmt, bool volatilep)
{
if (gimple_has_mem_ops (stmt))
stmt->has_volatile_ops = (unsigned) volatilep;
/* Return true if STMT is in a transaction. */
static inline bool
-gimple_in_transaction (gimple stmt)
+gimple_in_transaction (const gimple *stmt)
{
return bb_in_transaction (gimple_bb (stmt));
}
/* Return true if statement STMT may access memory. */
static inline bool
-gimple_references_memory_p (gimple stmt)
+gimple_references_memory_p (gimple *stmt)
{
return gimple_has_mem_ops (stmt) && gimple_vuse (stmt);
}
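gimple_vuse / gimple_vdef return NULL_TREE when a statement carries no virtual operand, which is why gimple_references_memory_p above is a one-liner. A sketch of the common filtering pattern over a block, assuming SSA form and gimple-iterator.h:

/* Sketch: count statements in BB that may read or write memory.  */
static unsigned
count_memory_stmts (basic_block bb)
{
  unsigned n = 0;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_vuse (stmt) || gimple_vdef (stmt))
	n++;
    }
  return n;
}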
/* Return the subcode for OMP statement S. */
static inline unsigned
-gimple_omp_subcode (const_gimple s)
+gimple_omp_subcode (const gimple *s)
{
gcc_gimple_checking_assert (gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD
&& gimple_code (s) <= GIMPLE_OMP_TEAMS);
/* Set the subcode for OMP statement S to SUBCODE. */
static inline void
-gimple_omp_set_subcode (gimple s, unsigned int subcode)
+gimple_omp_set_subcode (gimple *s, unsigned int subcode)
{
/* We only have 16 bits for the subcode. Assert that we are not
overflowing it. */
/* Set the nowait flag on OMP_RETURN statement S. */
static inline void
-gimple_omp_return_set_nowait (gimple s)
+gimple_omp_return_set_nowait (gimple *s)
{
GIMPLE_CHECK (s, GIMPLE_OMP_RETURN);
s->subcode |= GF_OMP_RETURN_NOWAIT;
flag set. */
static inline bool
-gimple_omp_return_nowait_p (const_gimple g)
+gimple_omp_return_nowait_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_RETURN);
return (gimple_omp_subcode (g) & GF_OMP_RETURN_NOWAIT) != 0;
/* Set the LHS of OMP return. */
static inline void
-gimple_omp_return_set_lhs (gimple g, tree lhs)
+gimple_omp_return_set_lhs (gimple *g, tree lhs)
{
gimple_statement_omp_return *omp_return_stmt =
as_a <gimple_statement_omp_return *> (g);
/* Get the LHS of OMP return. */
static inline tree
-gimple_omp_return_lhs (const_gimple g)
+gimple_omp_return_lhs (const gimple *g)
{
const gimple_statement_omp_return *omp_return_stmt =
as_a <const gimple_statement_omp_return *> (g);
/* Return a pointer to the LHS of OMP return. */
static inline tree *
-gimple_omp_return_lhs_ptr (gimple g)
+gimple_omp_return_lhs_ptr (gimple *g)
{
gimple_statement_omp_return *omp_return_stmt =
as_a <gimple_statement_omp_return *> (g);
flag set. */
static inline bool
-gimple_omp_section_last_p (const_gimple g)
+gimple_omp_section_last_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_SECTION);
return (gimple_omp_subcode (g) & GF_OMP_SECTION_LAST) != 0;
/* Set the GF_OMP_SECTION_LAST flag on G. */
static inline void
-gimple_omp_section_set_last (gimple g)
+gimple_omp_section_set_last (gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_SECTION);
g->subcode |= GF_OMP_SECTION_LAST;
GF_OMP_PARALLEL_COMBINED flag set. */
static inline bool
-gimple_omp_parallel_combined_p (const_gimple g)
+gimple_omp_parallel_combined_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL);
return (gimple_omp_subcode (g) & GF_OMP_PARALLEL_COMBINED) != 0;
value of COMBINED_P. */
static inline void
-gimple_omp_parallel_set_combined_p (gimple g, bool combined_p)
+gimple_omp_parallel_set_combined_p (gimple *g, bool combined_p)
{
GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL);
if (combined_p)
GF_OMP_ATOMIC_NEED_VALUE flag set. */
static inline bool
-gimple_omp_atomic_need_value_p (const_gimple g)
+gimple_omp_atomic_need_value_p (const gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
/* Set the GF_OMP_ATOMIC_NEED_VALUE flag on G. */
static inline void
-gimple_omp_atomic_set_need_value (gimple g)
+gimple_omp_atomic_set_need_value (gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
GF_OMP_ATOMIC_SEQ_CST flag set. */
static inline bool
-gimple_omp_atomic_seq_cst_p (const_gimple g)
+gimple_omp_atomic_seq_cst_p (const gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
/* Set the GF_OMP_ATOMIC_SEQ_CST flag on G. */
static inline void
-gimple_omp_atomic_set_seq_cst (gimple g)
+gimple_omp_atomic_set_seq_cst (gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
/* Return the number of operands for statement GS. */
static inline unsigned
-gimple_num_ops (const_gimple gs)
+gimple_num_ops (const gimple *gs)
{
return gs->num_ops;
}
/* Set the number of operands for statement GS. */
static inline void
-gimple_set_num_ops (gimple gs, unsigned num_ops)
+gimple_set_num_ops (gimple *gs, unsigned num_ops)
{
gs->num_ops = num_ops;
}
/* Return the array of operands for statement GS. */
static inline tree *
-gimple_ops (gimple gs)
+gimple_ops (gimple *gs)
{
size_t off;
/* Return operand I for statement GS. */
static inline tree
-gimple_op (const_gimple gs, unsigned i)
+gimple_op (const gimple *gs, unsigned i)
{
if (gimple_has_ops (gs))
{
/* Return a pointer to operand I for statement GS. */
static inline tree *
-gimple_op_ptr (const_gimple gs, unsigned i)
+gimple_op_ptr (const gimple *gs, unsigned i)
{
if (gimple_has_ops (gs))
{
/* Set operand I of statement GS to OP. */
static inline void
-gimple_set_op (gimple gs, unsigned i, tree op)
+gimple_set_op (gimple *gs, unsigned i, tree op)
{
gcc_gimple_checking_assert (gimple_has_ops (gs) && i < gimple_num_ops (gs));
/* Return true if GS is a GIMPLE_ASSIGN. */
static inline bool
-is_gimple_assign (const_gimple gs)
+is_gimple_assign (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_ASSIGN;
}
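The gimple * overloads of the gassign accessors below simply re-check and forward to the subclass versions, so untyped callers still work. A sketch of the usual recognize-and-extract pattern (illustrative helper, not from the patch):

/* Sketch: recognize x = y + z and return the addition's first operand.  */
static tree
addition_rhs1 (gimple *stmt)
{
  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == PLUS_EXPR)
    return gimple_assign_rhs1 (stmt);
  return NULL_TREE;
}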
}
static inline tree
-gimple_assign_lhs (const_gimple gs)
+gimple_assign_lhs (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_lhs (ass);
}
static inline tree *
-gimple_assign_lhs_ptr (const_gimple gs)
+gimple_assign_lhs_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_lhs_ptr (ass);
}
static inline void
-gimple_assign_set_lhs (gimple gs, tree lhs)
+gimple_assign_set_lhs (gimple *gs, tree lhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
gimple_assign_set_lhs (ass, lhs);
}
static inline tree
-gimple_assign_rhs1 (const_gimple gs)
+gimple_assign_rhs1 (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs1 (ass);
}
static inline tree *
-gimple_assign_rhs1_ptr (const_gimple gs)
+gimple_assign_rhs1_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs1_ptr (ass);
}
static inline void
-gimple_assign_set_rhs1 (gimple gs, tree rhs)
+gimple_assign_set_rhs1 (gimple *gs, tree rhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
gimple_assign_set_rhs1 (ass, rhs);
}
static inline tree
-gimple_assign_rhs2 (const_gimple gs)
+gimple_assign_rhs2 (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs2 (ass);
}
static inline tree *
-gimple_assign_rhs2_ptr (const_gimple gs)
+gimple_assign_rhs2_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs2_ptr (ass);
}
static inline void
-gimple_assign_set_rhs2 (gimple gs, tree rhs)
+gimple_assign_set_rhs2 (gimple *gs, tree rhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
return gimple_assign_set_rhs2 (ass, rhs);
}
static inline tree
-gimple_assign_rhs3 (const_gimple gs)
+gimple_assign_rhs3 (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs3 (ass);
statement GS. */
static inline tree *
-gimple_assign_rhs3_ptr (const_gimple gs)
+gimple_assign_rhs3_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
gcc_gimple_checking_assert (gimple_num_ops (gs) >= 4);
}
static inline void
-gimple_assign_set_rhs3 (gimple gs, tree rhs)
+gimple_assign_set_rhs3 (gimple *gs, tree rhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
gimple_assign_set_rhs3 (ass, rhs);
/* Sets nontemporal move flag of GS to NONTEMPORAL. */
static inline void
-gimple_assign_set_nontemporal_move (gimple gs, bool nontemporal)
+gimple_assign_set_nontemporal_move (gimple *gs, bool nontemporal)
{
GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
gs->nontemporal_move = nontemporal;
}
static inline enum tree_code
-gimple_assign_rhs_code (const_gimple gs)
+gimple_assign_rhs_code (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs_code (ass);
assignment S. */
static inline void
-gimple_assign_set_rhs_code (gimple s, enum tree_code code)
+gimple_assign_set_rhs_code (gimple *s, enum tree_code code)
{
GIMPLE_CHECK (s, GIMPLE_ASSIGN);
s->subcode = code;
This will never return GIMPLE_INVALID_RHS. */
static inline enum gimple_rhs_class
-gimple_assign_rhs_class (const_gimple gs)
+gimple_assign_rhs_class (const gimple *gs)
{
return get_gimple_rhs_class (gimple_assign_rhs_code (gs));
}
and do not have the semantics of a copy, such as COND_EXPR. */
static inline bool
-gimple_assign_single_p (const_gimple gs)
+gimple_assign_single_p (const gimple *gs)
{
return (is_gimple_assign (gs)
&& gimple_assign_rhs_class (gs) == GIMPLE_SINGLE_RHS);
/* Return true if GS performs a store to its lhs. */
static inline bool
-gimple_store_p (const_gimple gs)
+gimple_store_p (const gimple *gs)
{
tree lhs = gimple_get_lhs (gs);
return lhs && !is_gimple_reg (lhs);
/* Return true if GS is an assignment that loads from its rhs1. */
static inline bool
-gimple_assign_load_p (const_gimple gs)
+gimple_assign_load_p (const gimple *gs)
{
tree rhs;
if (!gimple_assign_single_p (gs))
/* Return true if S is a type-cast assignment. */
static inline bool
-gimple_assign_cast_p (const_gimple s)
+gimple_assign_cast_p (const gimple *s)
{
if (is_gimple_assign (s))
{
/* Return true if S is a clobber statement. */
static inline bool
-gimple_clobber_p (const_gimple s)
+gimple_clobber_p (const gimple *s)
{
return gimple_assign_single_p (s)
&& TREE_CLOBBER_P (gimple_assign_rhs1 (s));
/* Return true if GS is a GIMPLE_CALL. */
static inline bool
-is_gimple_call (const_gimple gs)
+is_gimple_call (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_CALL;
}
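The call accessors that follow take the same shape: check via is_gimple_call or GIMPLE_CHECK2, then forward to the gcall version. A sketch of walking a call's arguments, assuming print_generic_expr from tree-pretty-print:

/* Sketch: dump each argument of a call statement.  */
static void
dump_call_args (gimple *stmt, FILE *out)
{
  if (!is_gimple_call (stmt))
    return;
  for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
    {
      print_generic_expr (out, gimple_call_arg (stmt, i), 0);
      fputc ('\n', out);
    }
}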
}
static inline tree
-gimple_call_lhs (const_gimple gs)
+gimple_call_lhs (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_lhs (gc);
}
static inline tree *
-gimple_call_lhs_ptr (const_gimple gs)
+gimple_call_lhs_ptr (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_lhs_ptr (gc);
}
static inline void
-gimple_call_set_lhs (gimple gs, tree lhs)
+gimple_call_set_lhs (gimple *gs, tree lhs)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_lhs (gc, lhs);
}
static inline bool
-gimple_call_internal_p (const_gimple gs)
+gimple_call_internal_p (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_internal_p (gc);
}
static inline bool
-gimple_call_with_bounds_p (const_gimple gs)
+gimple_call_with_bounds_p (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_with_bounds_p (gc);
}
static inline void
-gimple_call_set_with_bounds (gimple gs, bool with_bounds)
+gimple_call_set_with_bounds (gimple *gs, bool with_bounds)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_with_bounds (gc, with_bounds);
}
static inline enum internal_fn
-gimple_call_internal_fn (const_gimple gs)
+gimple_call_internal_fn (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_internal_fn (gc);
}
static inline void
-gimple_call_set_ctrl_altering (gimple s, bool ctrl_altering_p)
+gimple_call_set_ctrl_altering (gimple *s, bool ctrl_altering_p)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (s);
gimple_call_set_ctrl_altering (gc, ctrl_altering_p);
}
static inline bool
-gimple_call_ctrl_altering_p (const_gimple gs)
+gimple_call_ctrl_altering_p (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_ctrl_altering_p (gc);
}
static inline tree
-gimple_call_fntype (const_gimple gs)
+gimple_call_fntype (const gimple *gs)
{
const gcall *call_stmt = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fntype (call_stmt);
}
static inline tree
-gimple_call_fn (const_gimple gs)
+gimple_call_fn (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fn (gc);
}
static inline tree *
-gimple_call_fn_ptr (const_gimple gs)
+gimple_call_fn_ptr (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fn_ptr (gc);
}
static inline void
-gimple_call_set_fndecl (gimple gs, tree decl)
+gimple_call_set_fndecl (gimple *gs, tree decl)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_fndecl (gc, decl);
}
static inline tree
-gimple_call_fndecl (const_gimple gs)
+gimple_call_fndecl (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fndecl (gc);
}
static inline tree
-gimple_call_chain (const_gimple gs)
+gimple_call_chain (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_chain (gc);
}
static inline unsigned
-gimple_call_num_args (const_gimple gs)
+gimple_call_num_args (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_num_args (gc);
}
static inline tree
-gimple_call_arg (const_gimple gs, unsigned index)
+gimple_call_arg (const gimple *gs, unsigned index)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_arg (gc, index);
}
static inline tree *
-gimple_call_arg_ptr (const_gimple gs, unsigned index)
+gimple_call_arg_ptr (const gimple *gs, unsigned index)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_arg_ptr (gc, index);
}
static inline void
-gimple_call_set_arg (gimple gs, unsigned index, tree arg)
+gimple_call_set_arg (gimple *gs, unsigned index, tree arg)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_arg (gc, index, arg);
}
static inline bool
-gimple_call_noreturn_p (const_gimple s)
+gimple_call_noreturn_p (const gimple *s)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (s);
return gimple_call_noreturn_p (gc);
non-NULL lhs. */
static inline bool
-gimple_has_lhs (gimple stmt)
+gimple_has_lhs (gimple *stmt)
{
if (is_gimple_assign (stmt))
return true;
}
static inline enum tree_code
-gimple_cond_code (const_gimple gs)
+gimple_cond_code (const gimple *gs)
{
const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
return gimple_cond_code (gc);
}
static inline tree
-gimple_cond_lhs (const_gimple gs)
+gimple_cond_lhs (const gimple *gs)
{
const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
return gimple_cond_lhs (gc);
}
static inline tree
-gimple_cond_rhs (const_gimple gs)
+gimple_cond_rhs (const gimple *gs)
{
const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
return gimple_cond_rhs (gc);
/* Return the destination of the unconditional jump GS. */
static inline tree
-gimple_goto_dest (const_gimple gs)
+gimple_goto_dest (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_GOTO);
return gimple_op (gs, 0);
/* Append a statement to the end of a GIMPLE_BIND's body. */
static inline void
-gimple_bind_add_stmt (gbind *bind_stmt, gimple stmt)
+gimple_bind_add_stmt (gbind *bind_stmt, gimple *stmt)
{
gimple_seq_add_stmt (&bind_stmt->body, stmt);
}
/* Return the types handled by GIMPLE_EH_FILTER statement GS. */
static inline tree
-gimple_eh_filter_types (const_gimple gs)
+gimple_eh_filter_types (const gimple *gs)
{
const geh_filter *eh_filter_stmt = as_a <const geh_filter *> (gs);
return eh_filter_stmt->types;
GS. */
static inline tree *
-gimple_eh_filter_types_ptr (gimple gs)
+gimple_eh_filter_types_ptr (gimple *gs)
{
geh_filter *eh_filter_stmt = as_a <geh_filter *> (gs);
return &eh_filter_stmt->types;
GIMPLE_EH_FILTER statement fails. */
static inline gimple_seq *
-gimple_eh_filter_failure_ptr (gimple gs)
+gimple_eh_filter_failure_ptr (gimple *gs)
{
geh_filter *eh_filter_stmt = as_a <geh_filter *> (gs);
return &eh_filter_stmt->failure;
statement fails. */
static inline gimple_seq
-gimple_eh_filter_failure (gimple gs)
+gimple_eh_filter_failure (gimple *gs)
{
return *gimple_eh_filter_failure_ptr (gs);
}
either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY. */
static inline enum gimple_try_flags
-gimple_try_kind (const_gimple gs)
+gimple_try_kind (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_TRY);
return (enum gimple_try_flags) (gs->subcode & GIMPLE_TRY_KIND);
/* Return the GIMPLE_TRY_CATCH_IS_CLEANUP flag. */
static inline bool
-gimple_try_catch_is_cleanup (const_gimple gs)
+gimple_try_catch_is_cleanup (const gimple *gs)
{
gcc_gimple_checking_assert (gimple_try_kind (gs) == GIMPLE_TRY_CATCH);
return (gs->subcode & GIMPLE_TRY_CATCH_IS_CLEANUP) != 0;
body for GIMPLE_TRY GS. */
static inline gimple_seq *
-gimple_try_eval_ptr (gimple gs)
+gimple_try_eval_ptr (gimple *gs)
{
gtry *try_stmt = as_a <gtry *> (gs);
return &try_stmt->eval;
/* Return the sequence of statements used as the body for GIMPLE_TRY GS. */
static inline gimple_seq
-gimple_try_eval (gimple gs)
+gimple_try_eval (gimple *gs)
{
return *gimple_try_eval_ptr (gs);
}
GIMPLE_TRY GS. */
static inline gimple_seq *
-gimple_try_cleanup_ptr (gimple gs)
+gimple_try_cleanup_ptr (gimple *gs)
{
gtry *try_stmt = as_a <gtry *> (gs);
return &try_stmt->cleanup;
GIMPLE_TRY GS. */
static inline gimple_seq
-gimple_try_cleanup (gimple gs)
+gimple_try_cleanup (gimple *gs)
{
return *gimple_try_cleanup_ptr (gs);
}
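gimple_try_eval / gimple_try_cleanup return the sequences by value while the _ptr variants expose them for in-place rewriting. A sketch of the read-only use, assuming gimple_seq_empty_p:

/* Sketch: true if a GIMPLE_TRY has an empty cleanup sequence.  */
static bool
try_cleanup_empty_p (gimple *stmt)
{
  return gimple_code (stmt) == GIMPLE_TRY
	 && gimple_seq_empty_p (gimple_try_cleanup (stmt));
}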
/* Return a pointer to the cleanup sequence for cleanup statement GS. */
static inline gimple_seq *
-gimple_wce_cleanup_ptr (gimple gs)
+gimple_wce_cleanup_ptr (gimple *gs)
{
gimple_statement_wce *wce_stmt = as_a <gimple_statement_wce *> (gs);
return &wce_stmt->cleanup;
/* Return the cleanup sequence for cleanup statement GS. */
static inline gimple_seq
-gimple_wce_cleanup (gimple gs)
+gimple_wce_cleanup (gimple *gs)
{
return *gimple_wce_cleanup_ptr (gs);
}
/* Set CLEANUP to be the cleanup sequence for GS. */
static inline void
-gimple_wce_set_cleanup (gimple gs, gimple_seq cleanup)
+gimple_wce_set_cleanup (gimple *gs, gimple_seq cleanup)
{
gimple_statement_wce *wce_stmt = as_a <gimple_statement_wce *> (gs);
wce_stmt->cleanup = cleanup;
/* Return the CLEANUP_EH_ONLY flag for a WCE tuple. */
static inline bool
-gimple_wce_cleanup_eh_only (const_gimple gs)
+gimple_wce_cleanup_eh_only (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
return gs->subcode != 0;
/* Set the CLEANUP_EH_ONLY flag for a WCE tuple. */
static inline void
-gimple_wce_set_cleanup_eh_only (gimple gs, bool eh_only_p)
+gimple_wce_set_cleanup_eh_only (gimple *gs, bool eh_only_p)
{
GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
gs->subcode = (unsigned int) eh_only_p;
/* Return the maximum number of arguments supported by GIMPLE_PHI GS. */
static inline unsigned
-gimple_phi_capacity (const_gimple gs)
+gimple_phi_capacity (const gimple *gs)
{
const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->capacity;
GS. */
static inline unsigned
-gimple_phi_num_args (const_gimple gs)
+gimple_phi_num_args (const gimple *gs)
{
const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->nargs;
/* Return the SSA name created by GIMPLE_PHI GS. */
static inline tree
-gimple_phi_result (const_gimple gs)
+gimple_phi_result (const gimple *gs)
{
const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->result;
/* Return a pointer to the SSA name created by GIMPLE_PHI GS. */
static inline tree *
-gimple_phi_result_ptr (gimple gs)
+gimple_phi_result_ptr (gimple *gs)
{
gphi *phi_stmt = as_a <gphi *> (gs);
return &phi_stmt->result;
GIMPLE_PHI GS. */
static inline struct phi_arg_d *
-gimple_phi_arg (gimple gs, unsigned index)
+gimple_phi_arg (gimple *gs, unsigned index)
{
gphi *phi_stmt = as_a <gphi *> (gs);
gcc_gimple_checking_assert (index <= phi_stmt->capacity);
/* Return the tree operand for argument I of PHI node GS. */
static inline tree
-gimple_phi_arg_def (gimple gs, size_t index)
+gimple_phi_arg_def (gimple *gs, size_t index)
{
return gimple_phi_arg (gs, index)->def;
}
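The PHI accessors above still accept a plain gimple * and downcast internally via as_a. A sketch of iterating a PHI's incoming values (hypothetical predicate, not from the patch):

/* Sketch: true if every incoming value of PHI is the same operand.  */
static bool
phi_args_identical_p (gimple *phi)
{
  tree first = gimple_phi_arg_def (phi, 0);
  for (unsigned i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != first)
      return false;
  return true;
}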
/* Return true if GS is a GIMPLE_DEBUG statement. */
static inline bool
-is_gimple_debug (const_gimple gs)
+is_gimple_debug (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_DEBUG;
}
/* Return true if S is a GIMPLE_DEBUG BIND statement. */
static inline bool
-gimple_debug_bind_p (const_gimple s)
+gimple_debug_bind_p (const gimple *s)
{
if (is_gimple_debug (s))
return s->subcode == GIMPLE_DEBUG_BIND;
/* Return the variable bound in a GIMPLE_DEBUG bind statement. */
static inline tree
-gimple_debug_bind_get_var (gimple dbg)
+gimple_debug_bind_get_var (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
statement. */
static inline tree
-gimple_debug_bind_get_value (gimple dbg)
+gimple_debug_bind_get_value (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
GIMPLE_DEBUG bind statement. */
static inline tree *
-gimple_debug_bind_get_value_ptr (gimple dbg)
+gimple_debug_bind_get_value_ptr (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
/* Set the variable bound in a GIMPLE_DEBUG bind statement. */
static inline void
-gimple_debug_bind_set_var (gimple dbg, tree var)
+gimple_debug_bind_set_var (gimple *dbg, tree var)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
statement. */
static inline void
-gimple_debug_bind_set_value (gimple dbg, tree value)
+gimple_debug_bind_set_value (gimple *dbg, tree value)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
statement. */
static inline void
-gimple_debug_bind_reset_value (gimple dbg)
+gimple_debug_bind_reset_value (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
value. */
static inline bool
-gimple_debug_bind_has_value_p (gimple dbg)
+gimple_debug_bind_has_value_p (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
/* Return true if S is a GIMPLE_DEBUG SOURCE BIND statement. */
static inline bool
-gimple_debug_source_bind_p (const_gimple s)
+gimple_debug_source_bind_p (const gimple *s)
{
if (is_gimple_debug (s))
return s->subcode == GIMPLE_DEBUG_SOURCE_BIND;
/* Return the variable bound in a GIMPLE_DEBUG source bind statement. */
static inline tree
-gimple_debug_source_bind_get_var (gimple dbg)
+gimple_debug_source_bind_get_var (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
statement. */
static inline tree
-gimple_debug_source_bind_get_value (gimple dbg)
+gimple_debug_source_bind_get_value (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
GIMPLE_DEBUG source bind statement. */
static inline tree *
-gimple_debug_source_bind_get_value_ptr (gimple dbg)
+gimple_debug_source_bind_get_value_ptr (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
/* Set the variable bound in a GIMPLE_DEBUG source bind statement. */
static inline void
-gimple_debug_source_bind_set_var (gimple dbg, tree var)
+gimple_debug_source_bind_set_var (gimple *dbg, tree var)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
statement. */
static inline void
-gimple_debug_source_bind_set_value (gimple dbg, tree value)
+gimple_debug_source_bind_set_value (gimple *dbg, tree value)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
/* Return the line number for EXPR, or return -1 if we have no line
number information for it. */
static inline int
-get_lineno (const_gimple stmt)
+get_lineno (const gimple *stmt)
{
location_t loc;
/* Return a pointer to the body for the OMP statement GS. */
static inline gimple_seq *
-gimple_omp_body_ptr (gimple gs)
+gimple_omp_body_ptr (gimple *gs)
{
return &static_cast <gimple_statement_omp *> (gs)->body;
}
/* Return the body for the OMP statement GS. */
static inline gimple_seq
-gimple_omp_body (gimple gs)
+gimple_omp_body (gimple *gs)
{
return *gimple_omp_body_ptr (gs);
}
/* Set BODY to be the body for the OMP statement GS. */
static inline void
-gimple_omp_set_body (gimple gs, gimple_seq body)
+gimple_omp_set_body (gimple *gs, gimple_seq body)
{
static_cast <gimple_statement_omp *> (gs)->body = body;
}
/* Return the kind of the OMP_FOR statement G. */
static inline int
-gimple_omp_for_kind (const_gimple g)
+gimple_omp_for_kind (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
return (gimple_omp_subcode (g) & GF_OMP_FOR_KIND_MASK);
GF_OMP_FOR_COMBINED flag set. */
static inline bool
-gimple_omp_for_combined_p (const_gimple g)
+gimple_omp_for_combined_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
return (gimple_omp_subcode (g) & GF_OMP_FOR_COMBINED) != 0;
GF_OMP_FOR_COMBINED_INTO flag set. */
static inline bool
-gimple_omp_for_combined_into_p (const_gimple g)
+gimple_omp_for_combined_into_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
return (gimple_omp_subcode (g) & GF_OMP_FOR_COMBINED_INTO) != 0;
/* Return the clauses associated with the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_clauses (const_gimple gs)
+gimple_omp_for_clauses (const gimple *gs)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
return omp_for_stmt->clauses;
GS. */
static inline tree *
-gimple_omp_for_clauses_ptr (gimple gs)
+gimple_omp_for_clauses_ptr (gimple *gs)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return &omp_for_stmt->clauses;
GS. */
static inline void
-gimple_omp_for_set_clauses (gimple gs, tree clauses)
+gimple_omp_for_set_clauses (gimple *gs, tree clauses)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
omp_for_stmt->clauses = clauses;
/* Get the collapse count of the OMP_FOR statement GS. */
static inline size_t
-gimple_omp_for_collapse (gimple gs)
+gimple_omp_for_collapse (gimple *gs)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return omp_for_stmt->collapse;
/* Return the condition code associated with the OMP_FOR statement GS. */
static inline enum tree_code
-gimple_omp_for_cond (const_gimple gs, size_t i)
+gimple_omp_for_cond (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Set COND to be the condition code for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_cond (gimple gs, size_t i, enum tree_code cond)
+gimple_omp_for_set_cond (gimple *gs, size_t i, enum tree_code cond)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (TREE_CODE_CLASS (cond) == tcc_comparison
/* Return the index variable for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_index (const_gimple gs, size_t i)
+gimple_omp_for_index (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return a pointer to the index variable for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_index_ptr (gimple gs, size_t i)
+gimple_omp_for_index_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Set INDEX to be the index variable for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_index (gimple gs, size_t i, tree index)
+gimple_omp_for_set_index (gimple *gs, size_t i, tree index)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return the initial value for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_initial (const_gimple gs, size_t i)
+gimple_omp_for_initial (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return a pointer to the initial value for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_initial_ptr (gimple gs, size_t i)
+gimple_omp_for_initial_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Set INITIAL to be the initial value for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_initial (gimple gs, size_t i, tree initial)
+gimple_omp_for_set_initial (gimple *gs, size_t i, tree initial)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return the final value for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_final (const_gimple gs, size_t i)
+gimple_omp_for_final (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return a pointer to the final value for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_final_ptr (gimple gs, size_t i)
+gimple_omp_for_final_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Set FINAL to be the final value for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_final (gimple gs, size_t i, tree final)
+gimple_omp_for_set_final (gimple *gs, size_t i, tree final)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return the increment value for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_incr (const_gimple gs, size_t i)
+gimple_omp_for_incr (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Return a pointer to the increment value for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_incr_ptr (gimple gs, size_t i)
+gimple_omp_for_incr_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
/* Set INCR to be the increment value for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_incr (gimple gs, size_t i, tree incr)
+gimple_omp_for_set_incr (gimple *gs, size_t i, tree incr)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
statement GS starts. */
static inline gimple_seq *
-gimple_omp_for_pre_body_ptr (gimple gs)
+gimple_omp_for_pre_body_ptr (gimple *gs)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return &omp_for_stmt->pre_body;
statement GS starts. */
static inline gimple_seq
-gimple_omp_for_pre_body (gimple gs)
+gimple_omp_for_pre_body (gimple *gs)
{
return *gimple_omp_for_pre_body_ptr (gs);
}
OMP_FOR statement GS starts. */
static inline void
-gimple_omp_for_set_pre_body (gimple gs, gimple_seq pre_body)
+gimple_omp_for_set_pre_body (gimple *gs, gimple_seq pre_body)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
omp_for_stmt->pre_body = pre_body;
/* Return the clauses associated with OMP_PARALLEL GS. */
static inline tree
-gimple_omp_parallel_clauses (const_gimple gs)
+gimple_omp_parallel_clauses (const gimple *gs)
{
const gomp_parallel *omp_parallel_stmt = as_a <const gomp_parallel *> (gs);
return omp_parallel_stmt->clauses;
/* Return the clauses associated with OMP_TASK GS. */
static inline tree
-gimple_omp_task_clauses (const_gimple gs)
+gimple_omp_task_clauses (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->clauses;
/* Return a pointer to the clauses associated with OMP_TASK GS. */
static inline tree *
-gimple_omp_task_clauses_ptr (gimple gs)
+gimple_omp_task_clauses_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->clauses;
GS. */
static inline void
-gimple_omp_task_set_clauses (gimple gs, tree clauses)
+gimple_omp_task_set_clauses (gimple *gs, tree clauses)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->clauses = clauses;
/* Return the child function used to hold the body of OMP_TASK GS. */
static inline tree
-gimple_omp_task_child_fn (const_gimple gs)
+gimple_omp_task_child_fn (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->child_fn;
OMP_TASK GS. */
static inline tree *
-gimple_omp_task_child_fn_ptr (gimple gs)
+gimple_omp_task_child_fn_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->child_fn;
/* Set CHILD_FN to be the child function for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_child_fn (gimple gs, tree child_fn)
+gimple_omp_task_set_child_fn (gimple *gs, tree child_fn)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->child_fn = child_fn;
from the parent to the children threads in OMP_TASK GS. */
static inline tree
-gimple_omp_task_data_arg (const_gimple gs)
+gimple_omp_task_data_arg (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->data_arg;
/* Return a pointer to the data argument for OMP_TASK GS. */
static inline tree *
-gimple_omp_task_data_arg_ptr (gimple gs)
+gimple_omp_task_data_arg_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->data_arg;
/* Set DATA_ARG to be the data argument for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_data_arg (gimple gs, tree data_arg)
+gimple_omp_task_set_data_arg (gimple *gs, tree data_arg)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->data_arg = data_arg;
/* Return the clauses associated with OMP_TASK GS. */
static inline tree
-gimple_omp_taskreg_clauses (const_gimple gs)
+gimple_omp_taskreg_clauses (const gimple *gs)
{
const gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <const gimple_statement_omp_taskreg *> (gs);
/* Return a pointer to the clauses associated with OMP_TASK GS. */
static inline tree *
-gimple_omp_taskreg_clauses_ptr (gimple gs)
+gimple_omp_taskreg_clauses_ptr (gimple *gs)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
GS. */
static inline void
-gimple_omp_taskreg_set_clauses (gimple gs, tree clauses)
+gimple_omp_taskreg_set_clauses (gimple *gs, tree clauses)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
/* Return the child function used to hold the body of OMP_TASK GS. */
static inline tree
-gimple_omp_taskreg_child_fn (const_gimple gs)
+gimple_omp_taskreg_child_fn (const gimple *gs)
{
const gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <const gimple_statement_omp_taskreg *> (gs);
OMP_TASK GS. */
static inline tree *
-gimple_omp_taskreg_child_fn_ptr (gimple gs)
+gimple_omp_taskreg_child_fn_ptr (gimple *gs)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
/* Set CHILD_FN to be the child function for OMP_TASK GS. */
static inline void
-gimple_omp_taskreg_set_child_fn (gimple gs, tree child_fn)
+gimple_omp_taskreg_set_child_fn (gimple *gs, tree child_fn)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
from the parent to the children threads in OMP_TASK GS. */
static inline tree
-gimple_omp_taskreg_data_arg (const_gimple gs)
+gimple_omp_taskreg_data_arg (const gimple *gs)
{
const gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <const gimple_statement_omp_taskreg *> (gs);
/* Return a pointer to the data argument for OMP_TASK GS. */
static inline tree *
-gimple_omp_taskreg_data_arg_ptr (gimple gs)
+gimple_omp_taskreg_data_arg_ptr (gimple *gs)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
/* Set DATA_ARG to be the data argument for OMP_TASK GS. */
static inline void
-gimple_omp_taskreg_set_data_arg (gimple gs, tree data_arg)
+gimple_omp_taskreg_set_data_arg (gimple *gs, tree data_arg)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
/* Return the copy function used to hold the body of OMP_TASK GS. */
static inline tree
-gimple_omp_task_copy_fn (const_gimple gs)
+gimple_omp_task_copy_fn (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->copy_fn;
OMP_TASK GS. */
static inline tree *
-gimple_omp_task_copy_fn_ptr (gimple gs)
+gimple_omp_task_copy_fn_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->copy_fn;
/* Set COPY_FN to be the copy function for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_copy_fn (gimple gs, tree copy_fn)
+gimple_omp_task_set_copy_fn (gimple *gs, tree copy_fn)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->copy_fn = copy_fn;
/* Return size of the data block in bytes in OMP_TASK GS. */
static inline tree
-gimple_omp_task_arg_size (const_gimple gs)
+gimple_omp_task_arg_size (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->arg_size;
/* Return a pointer to the data block size for OMP_TASK GS. */
static inline tree *
-gimple_omp_task_arg_size_ptr (gimple gs)
+gimple_omp_task_arg_size_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->arg_size;
/* Set ARG_SIZE to be the data block size for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_arg_size (gimple gs, tree arg_size)
+gimple_omp_task_set_arg_size (gimple *gs, tree arg_size)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->arg_size = arg_size;
/* Return align of the data block in bytes in OMP_TASK GS. */
static inline tree
-gimple_omp_task_arg_align (const_gimple gs)
+gimple_omp_task_arg_align (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->arg_align;
/* Return a pointer to the data block align for OMP_TASK GS. */
static inline tree *
-gimple_omp_task_arg_align_ptr (gimple gs)
+gimple_omp_task_arg_align_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->arg_align;
/* Set ARG_ALIGN to be the data block align for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_arg_align (gimple gs, tree arg_align)
+gimple_omp_task_set_arg_align (gimple *gs, tree arg_align)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->arg_align = arg_align;
/* Return the clauses associated with OMP_SINGLE GS. */
static inline tree
-gimple_omp_single_clauses (const_gimple gs)
+gimple_omp_single_clauses (const gimple *gs)
{
const gomp_single *omp_single_stmt = as_a <const gomp_single *> (gs);
return omp_single_stmt->clauses;
/* Return a pointer to the clauses associated with OMP_SINGLE GS. */
static inline tree *
-gimple_omp_single_clauses_ptr (gimple gs)
+gimple_omp_single_clauses_ptr (gimple *gs)
{
gomp_single *omp_single_stmt = as_a <gomp_single *> (gs);
return &omp_single_stmt->clauses;
/* Return the clauses associated with OMP_TARGET GS. */
static inline tree
-gimple_omp_target_clauses (const_gimple gs)
+gimple_omp_target_clauses (const gimple *gs)
{
const gomp_target *omp_target_stmt = as_a <const gomp_target *> (gs);
return omp_target_stmt->clauses;
/* Return a pointer to the clauses associated with OMP_TARGET GS. */
static inline tree *
-gimple_omp_target_clauses_ptr (gimple gs)
+gimple_omp_target_clauses_ptr (gimple *gs)
{
gomp_target *omp_target_stmt = as_a <gomp_target *> (gs);
return &omp_target_stmt->clauses;
/* Return the kind of the OMP_TARGET G. */
static inline int
-gimple_omp_target_kind (const_gimple g)
+gimple_omp_target_kind (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_TARGET);
return (gimple_omp_subcode (g) & GF_OMP_TARGET_KIND_MASK);
/* Return the clauses associated with OMP_TEAMS GS. */
static inline tree
-gimple_omp_teams_clauses (const_gimple gs)
+gimple_omp_teams_clauses (const gimple *gs)
{
const gomp_teams *omp_teams_stmt = as_a <const gomp_teams *> (gs);
return omp_teams_stmt->clauses;
/* Return a pointer to the clauses associated with OMP_TEAMS GS. */
static inline tree *
-gimple_omp_teams_clauses_ptr (gimple gs)
+gimple_omp_teams_clauses_ptr (gimple *gs)
{
gomp_teams *omp_teams_stmt = as_a <gomp_teams *> (gs);
return &omp_teams_stmt->clauses;
/* Return the clauses associated with OMP_SECTIONS GS. */
static inline tree
-gimple_omp_sections_clauses (const_gimple gs)
+gimple_omp_sections_clauses (const gimple *gs)
{
const gomp_sections *omp_sections_stmt = as_a <const gomp_sections *> (gs);
return omp_sections_stmt->clauses;
/* Return a pointer to the clauses associated with OMP_SECTIONS GS. */
static inline tree *
-gimple_omp_sections_clauses_ptr (gimple gs)
+gimple_omp_sections_clauses_ptr (gimple *gs)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
return &omp_sections_stmt->clauses;
GS. */
static inline void
-gimple_omp_sections_set_clauses (gimple gs, tree clauses)
+gimple_omp_sections_set_clauses (gimple *gs, tree clauses)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
omp_sections_stmt->clauses = clauses;
in GS. */
static inline tree
-gimple_omp_sections_control (const_gimple gs)
+gimple_omp_sections_control (const gimple *gs)
{
const gomp_sections *omp_sections_stmt = as_a <const gomp_sections *> (gs);
return omp_sections_stmt->control;
GS. */
static inline tree *
-gimple_omp_sections_control_ptr (gimple gs)
+gimple_omp_sections_control_ptr (gimple *gs)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
return &omp_sections_stmt->control;
GIMPLE_OMP_SECTIONS in GS. */
static inline void
-gimple_omp_sections_set_control (gimple gs, tree control)
+gimple_omp_sections_set_control (gimple *gs, tree control)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
omp_sections_stmt->control = control;
/* Return the return bounds for GIMPLE_RETURN GS. */
static inline tree
-gimple_return_retbnd (const_gimple gs)
+gimple_return_retbnd (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_RETURN);
return gimple_op (gs, 1);
/* Set RETVAL to be the return bounds for GIMPLE_RETURN GS. */
static inline void
-gimple_return_set_retbnd (gimple gs, tree retval)
+gimple_return_set_retbnd (gimple *gs, tree retval)
{
GIMPLE_CHECK (gs, GIMPLE_RETURN);
gimple_set_op (gs, 1, retval);
case GIMPLE_OMP_CONTINUE
static inline bool
-is_gimple_omp (const_gimple stmt)
+is_gimple_omp (const gimple *stmt)
{
switch (gimple_code (stmt))
{
specifically. */
static inline bool
-is_gimple_omp_oacc (const_gimple stmt)
+is_gimple_omp_oacc (const gimple *stmt)
{
gcc_assert (is_gimple_omp (stmt));
switch (gimple_code (stmt))
/* Return true if the OMP gimple statement STMT is offloaded. */
static inline bool
-is_gimple_omp_offloaded (const_gimple stmt)
+is_gimple_omp_offloaded (const gimple *stmt)
{
gcc_assert (is_gimple_omp (stmt));
switch (gimple_code (stmt))
/* Returns TRUE if statement G is a GIMPLE_NOP. */
static inline bool
-gimple_nop_p (const_gimple g)
+gimple_nop_p (const gimple *g)
{
return gimple_code (g) == GIMPLE_NOP;
}
/* Return true if GS is a GIMPLE_RESX. */
static inline bool
-is_gimple_resx (const_gimple gs)
+is_gimple_resx (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_RESX;
}
void_type_node if the statement computes nothing. */
static inline tree
-gimple_expr_type (const_gimple stmt)
+gimple_expr_type (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
/* In general we want to pass out a type that can be substituted
by annotate_all_with_location. */
static inline bool
-gimple_do_not_emit_location_p (gimple g)
+gimple_do_not_emit_location_p (gimple *g)
{
return gimple_plf (g, GF_PLF_1);
}
annotate_one_with_location. */
static inline void
-gimple_set_do_not_emit_location (gimple g)
+gimple_set_do_not_emit_location (gimple *g)
{
/* The PLF flags are initialized to 0 when a new tuple is created,
so no need to initialize them anywhere. */
GIMPLE statements are inserted before *GSI_P. */
void
-gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
+gimple_regimplify_operands (gimple *stmt, gimple_stmt_iterator *gsi_p)
{
size_t i, num_ops;
tree lhs;
gimple_seq pre = NULL;
- gimple post_stmt = NULL;
+ gimple *post_stmt = NULL;
push_gimplify_context (gimple_in_ssa_p (cfun));
bool, enum gsi_iterator_update);
extern tree force_gimple_operand_gsi (gimple_stmt_iterator *, tree, bool, tree,
bool, enum gsi_iterator_update);
-extern void gimple_regimplify_operands (gimple, gimple_stmt_iterator *);
+extern void gimple_regimplify_operands (gimple *, gimple_stmt_iterator *);
#endif /* GCC_GIMPLIFY_ME_H */
only. */
static inline void
-gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
gimple_seq_add_stmt_without_update (seq_p, gs);
}
BODY is not a sequence, but the first tuple in a sequence. */
void
-pop_gimplify_context (gimple body)
+pop_gimplify_context (gimple *body)
{
struct gimplify_ctx *c = gimplify_ctxp;
tuple in the sequence of generated tuples for this statement.
Return NULL if gimplifying T produced no tuples. */
-static gimple
+static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
gimple_stmt_iterator last = gsi_last (*seq_p);
generate debug info for them; otherwise don't. */
void
-declare_vars (tree vars, gimple gs, bool debug_info)
+declare_vars (tree vars, gimple *gs, bool debug_info)
{
tree last = vars;
if (last)
&& flag_stack_reuse != SR_NONE)
{
tree clobber = build_constructor (TREE_TYPE (t), NULL);
- gimple clobber_stmt;
+ gimple *clobber_stmt;
TREE_THIS_VOLATILE (clobber) = 1;
clobber_stmt = gimple_build_assign (t, clobber);
gimple_set_location (clobber_stmt, end_locus);
EXPR_LOCATION (*expr_p));
vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
}
- gimple call = gimple_build_call_internal_vec (ifn, vargs);
+ gimple *call = gimple_build_call_internal_vec (ifn, vargs);
gimplify_seq_add_stmt (pre_p, call);
return GS_ALL_DONE;
}
&& TREE_OPERAND (expr, 2) != NULL_TREE
&& gimple_seq_may_fallthru (seq))
{
- gimple g;
+ gimple *g;
label_cont = create_artificial_label (UNKNOWN_LOCATION);
g = gimple_build_goto (label_cont);
tree *from_p = &TREE_OPERAND (*expr_p, 1);
tree *to_p = &TREE_OPERAND (*expr_p, 0);
enum gimplify_status ret = GS_UNHANDLED;
- gimple assign;
+ gimple *assign;
location_t loc = EXPR_LOCATION (*expr_p);
gimple_stmt_iterator gsi;
for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
{
- gimple wce = gsi_stmt (iter);
+ gimple *wce = gsi_stmt (iter);
if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
{
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
- gimple wce;
+ gimple *wce;
gimple_seq cleanup_stmts = NULL;
/* Errors can result in improperly nested cleanups, which results in
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple g;
+ gimple *g;
gimple_seq body = NULL;
gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple g;
+ gimple *g;
gimple_seq body = NULL;
gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple stmt;
+ gimple *stmt;
gimple_seq body = NULL;
enum omp_region_type ort;
if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
{
push_gimplify_context ();
- gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
+ gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
if (gimple_code (g) == GIMPLE_BIND)
pop_gimplify_context (g);
else
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
- gimple body_stmt;
+ gimple *body_stmt;
gtransaction *trans_stmt;
gimple_seq body = NULL;
int subcode = 0;
case CATCH_EXPR:
{
- gimple c;
+ gimple *c;
gimple_seq handler = NULL;
gimplify_and_add (CATCH_BODY (*expr_p), &handler);
c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
case EH_FILTER_EXPR:
{
- gimple ehf;
+ gimple *ehf;
gimple_seq failure = NULL;
gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
case OMP_CRITICAL:
{
gimple_seq body = NULL;
- gimple g;
+ gimple *g;
gimplify_and_add (OMP_BODY (*expr_p), &body);
switch (TREE_CODE (*expr_p))
{
location_t saved_location = input_location;
gimple_seq parm_stmts, seq;
- gimple outer_stmt;
+ gimple *outer_stmt;
gbind *outer_bind;
struct cgraph_node *cgn;
{
tree x;
gbind *new_bind;
- gimple tf;
+ gimple *tf;
gimple_seq cleanup = NULL, body = NULL;
tree tmp_var;
gcall *call;
&& !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
{
gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
- gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
+ gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
/* Clear the block for BIND, since it is no longer directly inside
the function, but within a try block. */
This function returns the newly created GIMPLE_ASSIGN tuple. */
-gimple
+gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
extern void free_gimplify_stack (void);
extern void push_gimplify_context (bool in_ssa = false,
bool rhs_cond_ok = false);
-extern void pop_gimplify_context (gimple);
+extern void pop_gimplify_context (gimple *);
extern gbind *gimple_current_bind_expr (void);
extern vec<gbind *> gimple_bind_expr_stack (void);
extern void gimplify_and_add (tree, gimple_seq *);
extern tree get_formal_tmp_var (tree, gimple_seq *);
extern tree get_initialized_tmp_var (tree, gimple_seq *, gimple_seq *);
-extern void declare_vars (tree, gimple, bool);
+extern void declare_vars (tree, gimple *, bool);
extern void gimple_add_tmp_var (tree);
extern void gimple_add_tmp_var_fn (struct function *, tree);
extern tree unshare_expr (tree);
extern void gimplify_function_tree (tree);
extern enum gimplify_status gimplify_va_arg_expr (tree *, gimple_seq *,
gimple_seq *);
-gimple gimplify_assign (tree, tree, gimple_seq *);
+gimple *gimplify_assign (tree, tree, gimple_seq *);
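
(A minimal usage sketch of the updated gimplify_assign prototype shown
above; this note is editorial and not part of the patch, and dst and src
are hypothetical trees of compatible type:

  gimple_seq seq = NULL;
  /* Build "dst = src" and append the new GIMPLE_ASSIGN to seq.  */
  gimple *g = gimplify_assign (dst, src, &seq);

The returned statement is now an explicit pointer, matching the body of
gimplify_assign in the hunk above.)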
/* Return true if gimplify_one_sizepos doesn't need to gimplify
expr (when in TYPE_SIZE{,_UNIT} and similar type/decl size/bitsize
DEF_STMT. GSI points to entry basic block of the TO_REGION. */
static void
-copy_def(tree tr, gimple def_stmt, sese region, sese to_region, gimple_stmt_iterator *gsi)
+copy_def(tree tr, gimple *def_stmt, sese region, sese to_region, gimple_stmt_iterator *gsi)
{
if (!defined_in_sese_p (tr, region))
return;
if (region->parameter_rename_map->get(use_tr))
continue;
- gimple def_of_use = SSA_NAME_DEF_STMT (use_tr);
+ gimple *def_of_use = SSA_NAME_DEF_STMT (use_tr);
if (!def_of_use)
continue;
copy_def (use_tr, def_of_use, region, to_region, gsi);
}
- gimple copy = gimple_copy (def_stmt);
+ gimple *copy = gimple_copy (def_stmt);
gsi_insert_after (gsi, copy, GSI_NEW_STMT);
/* Create new names for all the definitions created by COPY and
FOR_EACH_VEC_ELT (region->params, i, tr)
{
// If def is not in region.
- gimple def_stmt = SSA_NAME_DEF_STMT (tr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (tr);
if (def_stmt)
copy_def (tr, def_stmt, region, to_region, &gsi);
}
dump_gbb_cases (FILE *file, gimple_bb_p gbb)
{
int i;
- gimple stmt;
- vec<gimple> cases;
+ gimple *stmt;
+ vec<gimple *> cases;
if (!gbb)
return;
dump_gbb_conditions (FILE *file, gimple_bb_p gbb)
{
int i;
- gimple stmt;
- vec<gimple> conditions;
+ gimple *stmt;
+ vec<gimple *> conditions;
if (!gbb)
return;
static bool
stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
- gimple stmt)
+ gimple *stmt)
{
data_reference_p dr;
int j;
static bool
stmt_simple_for_scop_p (basic_block scop_entry, loop_p outermost_loop,
- gimple stmt, basic_block bb)
+ gimple *stmt, basic_block bb)
{
loop_p loop = bb->loop_father;
scop should end before this statement. The evaluation is limited using
OUTERMOST_LOOP as outermost loop that may change. */
-static gimple
+static gimple *
harmful_stmt_in_bb (basic_block scop_entry, loop_p outer_loop, basic_block bb)
{
gimple_stmt_iterator gsi;
{
loop_p loop = bb->loop_father;
struct scopdet_info result;
- gimple stmt;
+ gimple *stmt;
/* XXX: ENTRY_BLOCK_PTR could be optimized in later steps. */
basic_block entry_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
static void
remove_duplicate_close_phi (gphi *phi, gphi_iterator *gsi)
{
- gimple use_stmt;
+ gimple *use_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
tree res = gimple_phi_result (phi);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
loop_p loop;
if (is_gimple_debug (stmt))
int i;
unsigned j;
data_reference_p dr;
- gimple stmt;
+ gimple *stmt;
loop_p loop = GBB_BB (gbb)->loop_father;
/* Find parameters in the access functions of data references. */
add_conditions_to_domain (poly_bb_p pbb)
{
unsigned int i;
- gimple stmt;
+ gimple *stmt;
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
if (GBB_CONDITIONS (gbb).is_empty ())
{
edge e = single_pred_edge (bb);
basic_block pred = e->src;
- gimple stmt;
+ gimple *stmt;
if (loop_depth (pred->loop_father) > loop_depth (bb->loop_father))
return NULL;
virtual void after_dom_children (basic_block);
private:
- auto_vec<gimple, 3> m_conditions, m_cases;
+ auto_vec<gimple *, 3> m_conditions, m_cases;
sese m_region;
};
GBB_DATA_REFS vector of BB. */
static void
-analyze_drs_in_stmts (scop_p scop, basic_block bb, vec<gimple> stmts)
+analyze_drs_in_stmts (scop_p scop, basic_block bb, vec<gimple *> stmts)
{
loop_p nest;
gimple_bb_p gbb;
- gimple stmt;
+ gimple *stmt;
int i;
sese region = SCOP_REGION (scop);
on STMTS. */
static void
-insert_stmts (scop_p scop, gimple stmt, gimple_seq stmts,
+insert_stmts (scop_p scop, gimple *stmt, gimple_seq stmts,
gimple_stmt_iterator insert_gsi)
{
gimple_stmt_iterator gsi;
- auto_vec<gimple, 3> x;
+ auto_vec<gimple *, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
/* Insert the assignment "RES := EXPR" just after AFTER_STMT. */
static void
-insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple after_stmt)
+insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple *after_stmt)
{
gimple_seq stmts;
gimple_stmt_iterator gsi;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
gassign *stmt = gimple_build_assign (unshare_expr (res), var);
- auto_vec<gimple, 3> x;
+ auto_vec<gimple *, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
gimple_stmt_iterator gsi;
gimple_seq stmts = NULL;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
- gimple stmt = gimple_build_assign (unshare_expr (res), var);
+ gimple *stmt = gimple_build_assign (unshare_expr (res), var);
basic_block bb;
- auto_vec<gimple, 3> x;
+ auto_vec<gimple *, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
/* Returns true when PHI is a loop close phi node. */
static bool
-scalar_close_phi_node_p (gimple phi)
+scalar_close_phi_node_p (gimple *phi)
{
if (gimple_code (phi) != GIMPLE_PHI
|| virtual_operand_p (gimple_phi_result (phi)))
propagate_expr_outside_region (tree def, tree expr, sese region)
{
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
gimple_seq stmts;
bool replaced_once = false;
rewrite_close_phi_out_of_ssa (scop_p scop, gimple_stmt_iterator *psi)
{
sese region = SCOP_REGION (scop);
- gimple phi = gsi_stmt (*psi);
+ gimple *phi = gsi_stmt (*psi);
tree res = gimple_phi_result (phi);
basic_block bb = gimple_bb (phi);
gimple_stmt_iterator gsi = gsi_after_labels (bb);
tree arg = gimple_phi_arg_def (phi, 0);
- gimple stmt;
+ gimple *stmt;
/* Note that loop close phi nodes should have a single argument
because we translated the representation into a canonical form
basic_block bb = gimple_bb (phi);
tree res = gimple_phi_result (phi);
tree zero_dim_array = create_zero_dim_array (res, "phi_out_of_ssa");
- gimple stmt;
+ gimple *stmt;
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
rewrite_degenerate_phi (gphi_iterator *psi)
{
tree rhs;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
static void
rewrite_cross_bb_scalar_dependence (scop_p scop, tree zero_dim_array,
- tree def, gimple use_stmt)
+ tree def, gimple *use_stmt)
{
- gimple name_stmt;
+ gimple *name_stmt;
tree name;
ssa_op_iter iter;
use_operand_p use_p;
SCOP. */
static void
-handle_scalar_deps_crossing_scop_limits (scop_p scop, tree def, gimple stmt)
+handle_scalar_deps_crossing_scop_limits (scop_p scop, tree def, gimple *stmt)
{
tree var = create_tmp_reg (TREE_TYPE (def));
tree new_name = make_ssa_name (var, stmt);
bool needs_copy = false;
use_operand_p use_p;
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
sese region = SCOP_REGION (scop);
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
arrays everywhere else. */
if (needs_copy)
{
- gimple assign = gimple_build_assign (new_name, def);
+ gimple *assign = gimple_build_assign (new_name, def);
gimple_stmt_iterator psi = gsi_after_labels (SESE_EXIT (region)->dest);
update_stmt (assign);
rewrite_cross_bb_scalar_deps (scop_p scop, gimple_stmt_iterator *gsi)
{
sese region = SCOP_REGION (scop);
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
imm_use_iterator imm_iter;
tree def;
basic_block def_bb;
tree zero_dim_array = NULL_TREE;
- gimple use_stmt;
+ gimple *use_stmt;
bool res = false;
switch (gimple_code (stmt))
polyhedral form. */
static edge
-split_pbb (scop_p scop, poly_bb_p pbb, basic_block bb, gimple stmt)
+split_pbb (scop_p scop, poly_bb_p pbb, basic_block bb, gimple *stmt)
{
edge e1 = split_block (bb, stmt);
new_pbb_from_pbb (scop, pbb, e1->dest);
statements for which we want to ignore data dependences. */
static basic_block
-split_reduction_stmt (scop_p scop, gimple stmt)
+split_reduction_stmt (scop_p scop, gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
poly_bb_p pbb = pbb_from_bb (bb);
/* Return true when stmt is a reduction operation. */
static inline bool
-is_reduction_operation_p (gimple stmt)
+is_reduction_operation_p (gimple *stmt)
{
enum tree_code code;
static gphi *
follow_ssa_with_commutative_ops (tree arg, tree lhs)
{
- gimple stmt;
+ gimple *stmt;
if (TREE_CODE (arg) != SSA_NAME)
return NULL;
the STMT. Return the phi node of the reduction cycle, or NULL. */
static gphi *
-detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
- vec<gimple> *in,
- vec<gimple> *out)
+detect_commutative_reduction_arg (tree lhs, gimple *stmt, tree arg,
+ vec<gimple *> *in,
+ vec<gimple *> *out)
{
gphi *phi = follow_ssa_with_commutative_ops (arg, lhs);
STMT. Return the phi node of the reduction cycle, or NULL. */
static gphi *
-detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
- vec<gimple> *out)
+detect_commutative_reduction_assign (gimple *stmt, vec<gimple *> *in,
+ vec<gimple *> *out)
{
tree lhs = gimple_assign_lhs (stmt);
static gphi *
follow_inital_value_to_phi (tree arg, tree lhs)
{
- gimple stmt;
+ gimple *stmt;
if (!arg || TREE_CODE (arg) != SSA_NAME)
return NULL;
LOOP_PHI. */
static bool
-used_outside_reduction (tree def, gimple loop_phi)
+used_outside_reduction (tree def, gimple *loop_phi)
{
use_operand_p use_p;
imm_use_iterator imm_iter;
/* In LOOP, DEF should be used only in LOOP_PHI. */
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
{
- gimple stmt = USE_STMT (use_p);
+ gimple *stmt = USE_STMT (use_p);
if (stmt != loop_phi
&& !is_gimple_debug (stmt)
node of the reduction cycle, or NULL. */
static gphi *
-detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
- vec<gimple> *out)
+detect_commutative_reduction (scop_p scop, gimple *stmt, vec<gimple *> *in,
+ vec<gimple *> *out)
{
if (scalar_close_phi_node_p (stmt))
{
- gimple def;
+ gimple *def;
gphi *loop_phi, *phi, *close_phi = as_a <gphi *> (stmt);
tree init, lhs, arg = gimple_phi_arg_def (close_phi, 0);
static void
translate_scalar_reduction_to_array_for_stmt (scop_p scop, tree red,
- gimple stmt, gphi *loop_phi)
+ gimple *stmt, gphi *loop_phi)
{
tree res = gimple_phi_result (loop_phi);
gassign *assign = gimple_build_assign (res, unshare_expr (red));
tree def;
use_operand_p use_p;
gimple_stmt_iterator gsi;
- auto_vec<gimple, 3> update;
+ auto_vec<gimple *, 3> update;
unsigned int i;
- gimple stmt;
+ gimple *stmt;
def = PHI_RESULT (phi);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
{
loop_p loop;
basic_block header, def_bb;
- gimple stmt;
+ gimple *stmt;
if (TREE_CODE (*index) != SSA_NAME)
return true;
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
tree res, def = gimple_phi_result (close_phi);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
static void
translate_scalar_reduction_to_array (scop_p scop,
- vec<gimple> in,
- vec<gimple> out)
+ vec<gimple *> in,
+ vec<gimple *> out)
{
- gimple loop_stmt;
+ gimple *loop_stmt;
unsigned int i = out.length () - 1;
tree red = close_phi_written_to_memory (as_a <gphi *> (out[i]));
FOR_EACH_VEC_ELT (in, i, loop_stmt)
{
- gimple close_stmt = out[i];
+ gimple *close_stmt = out[i];
if (i == 0)
{
gphi *close_phi)
{
bool res;
- auto_vec<gimple, 10> in;
- auto_vec<gimple, 10> out;
+ auto_vec<gimple *, 10> in;
+ auto_vec<gimple *, 10> out;
detect_commutative_reduction (scop, close_phi, &in, &out);
res = in.length () > 1;
Each enum value should correspond with a single member of the union
gimple_statement_d. */
-DEFGSSTRUCT(GSS_BASE, gimple_statement_base, false)
+DEFGSSTRUCT(GSS_BASE, gimple, false)
DEFGSSTRUCT(GSS_WITH_OPS, gimple_statement_with_ops, true)
DEFGSSTRUCT(GSS_WITH_MEM_OPS_BASE, gimple_statement_with_memory_ops_base, false)
DEFGSSTRUCT(GSS_WITH_MEM_OPS, gimple_statement_with_memory_ops, true)
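
(For orientation, a sketch of what the gimple_statement_base -> gimple
rename means at use sites; illustrative only and not part of the patch,
with gsi a hypothetical gimple_stmt_iterator:

  gimple *stmt = gsi_stmt (gsi);   /* was: gimple stmt = gsi_stmt (gsi); */
  const gimple *cst = stmt;        /* was: const_gimple cst = stmt;      */

The statement type itself is now spelled gimple, so pointer-ness and
constness are written out explicitly rather than hidden behind the old
typedefs.)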
wide_int arg_min, arg_max;
while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
{
- gimple g = SSA_NAME_DEF_STMT (arg);
+ gimple *g = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (g)
&& CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
{
wide_int arg_min, arg_max;
while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
{
- gimple g = SSA_NAME_DEF_STMT (arg);
+ gimple *g = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (g)
&& CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
{
/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion. */
static void
-expand_arith_overflow (enum tree_code code, gimple stmt)
+expand_arith_overflow (enum tree_code code, gimple *stmt)
{
tree lhs = gimple_call_lhs (stmt);
if (lhs == NULL_TREE)
bool
possible_polymorphic_call_target_p (tree ref,
- gimple stmt,
+ gimple *stmt,
struct cgraph_node *n)
{
ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
for (gimple_stmt_iterator gsi = gsi_start_bb (bb->bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
func_checker::compare_bb (sem_bb *bb1, sem_bb *bb2)
{
gimple_stmt_iterator gsi1, gsi2;
- gimple s1, s2;
+ gimple *s1, *s2;
gsi1 = gsi_start_bb_nondebug (bb1->bb);
gsi2 = gsi_start_bb_nondebug (bb2->bb);
assignment statements are semantically equivalent. */
bool
-func_checker::compare_gimple_assign (gimple s1, gimple s2)
+func_checker::compare_gimple_assign (gimple *s1, gimple *s2)
{
tree arg1, arg2;
tree_code code1, code2;
condition statements are semantically equivalent. */
bool
-func_checker::compare_gimple_cond (gimple s1, gimple s2)
+func_checker::compare_gimple_cond (gimple *s1, gimple *s2)
{
tree t1, t2;
tree_code code1, code2;
goto statements are semantically equivalent. */
bool
-func_checker::compare_gimple_goto (gimple g1, gimple g2)
+func_checker::compare_gimple_goto (gimple *g1, gimple *g2)
{
tree dest1, dest2;
FUNC is name of function and LINE is location in the source file. */
static inline bool
-return_different_stmts_1 (gimple s1, gimple s2, const char *code,
+return_different_stmts_1 (gimple *s1, gimple *s2, const char *code,
const char *func, unsigned int line)
{
if (dump_file && (dump_flags & TDF_DETAILS))
/* Verifies for given GIMPLEs S1 and S2 that
assignment statements are semantically equivalent. */
- bool compare_gimple_assign (gimple s1, gimple s2);
+ bool compare_gimple_assign (gimple *s1, gimple *s2);
/* Verifies for given GIMPLEs S1 and S2 that
condition statements are semantically equivalent. */
- bool compare_gimple_cond (gimple s1, gimple s2);
+ bool compare_gimple_cond (gimple *s1, gimple *s2);
/* Verifies for given GIMPLE_LABEL stmts S1 and S2 that
label statements are semantically equivalent. */
/* Verifies for given GIMPLEs S1 and S2 that
goto statements are semantically equivalent. */
- bool compare_gimple_goto (gimple s1, gimple s2);
+ bool compare_gimple_goto (gimple *s1, gimple *s2);
/* Verifies for given GIMPLE_RESX stmts S1 and S2 that
resx statements are semantically equivalent. */
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) != GIMPLE_DEBUG
&& gimple_code (stmt) != GIMPLE_PREDICT)
/* Improve accumulated hash for HSTATE based on a gimple statement STMT. */
void
-sem_function::hash_stmt (gimple stmt, inchash::hash &hstate)
+sem_function::hash_stmt (gimple *stmt, inchash::hash &hstate)
{
enum gimple_code code = gimple_code (stmt);
}
/* Improve accumulated hash for HSTATE based on a gimple statement STMT. */
- void hash_stmt (gimple stmt, inchash::hash &inchash);
+ void hash_stmt (gimple *stmt, inchash::hash &inchash);
/* Return true if polymorphic comparison must be processed. */
bool compare_polymorphic_p (void);
parameter. */
static tree
-unmodified_parm_1 (gimple stmt, tree op)
+unmodified_parm_1 (gimple *stmt, tree op)
{
/* SSA_NAME referring to parm default def? */
if (TREE_CODE (op) == SSA_NAME
parameter. Also traverse chains of SSA register assignments. */
static tree
-unmodified_parm (gimple stmt, tree op)
+unmodified_parm (gimple *stmt, tree op)
{
tree res = unmodified_parm_1 (stmt, op);
if (res)
static bool
unmodified_parm_or_parm_agg_item (struct ipa_func_body_info *fbi,
- gimple stmt, tree op, int *index_p,
+ gimple *stmt, tree op, int *index_p,
struct agg_position_info *aggpos)
{
tree res = unmodified_parm_1 (stmt, op);
penalty wrappers. */
static int
-eliminated_by_inlining_prob (gimple stmt)
+eliminated_by_inlining_prob (gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
enum tree_code rhs_code;
struct inline_summary *summary,
basic_block bb)
{
- gimple last;
+ gimple *last;
tree op;
int index;
struct agg_position_info aggpos;
enum tree_code code, inverted_code;
edge e;
edge_iterator ei;
- gimple set_stmt;
+ gimple *set_stmt;
tree op2;
last = last_stmt (bb);
struct inline_summary *summary,
basic_block bb)
{
- gimple lastg;
+ gimple *lastg;
tree op;
int index;
struct agg_position_info aggpos;
static struct predicate
will_be_nonconstant_predicate (struct ipa_func_body_info *fbi,
struct inline_summary *summary,
- gimple stmt,
+ gimple *stmt,
vec<predicate_t> nonconstant_names)
{
struct predicate p = true_predicate ();
struct record_modified_bb_info
{
bitmap bb_set;
- gimple stmt;
+ gimple *stmt;
};
/* Callback of walk_aliased_vdefs. Records basic blocks where the value may be
ought to be REG_BR_PROB_BASE / estimated_iters. */
static int
-param_change_prob (gimple stmt, int i)
+param_change_prob (gimple *stmt, int i)
{
tree op = gimple_call_arg (stmt, i);
basic_block bb = gimple_bb (stmt);
edge e;
edge_iterator ei;
basic_block first_bb = NULL;
- gimple stmt;
+ gimple *stmt;
if (single_pred_p (bb))
{
an impact on the earlier inlining.
Here find this pattern and fix it up later. */
-static gimple
+static gimple *
find_foldable_builtin_expect (basic_block bb)
{
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_call_builtin_p (stmt, BUILT_IN_EXPECT)
|| (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt)
tree var = gimple_call_lhs (stmt);
tree arg = gimple_call_arg (stmt, 0);
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
bool match = false;
bool done = false;
while (TREE_CODE (arg) == SSA_NAME)
{
- gimple stmt_tmp = SSA_NAME_DEF_STMT (arg);
+ gimple *stmt_tmp = SSA_NAME_DEF_STMT (arg);
if (!is_gimple_assign (stmt_tmp))
break;
switch (gimple_assign_rhs_code (stmt_tmp))
for (; !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_clobber_p (stmt))
int nblocks, n;
int *order;
predicate array_index = true_predicate ();
- gimple fix_builtin_expect_stmt;
+ gimple *fix_builtin_expect_stmt;
gcc_assert (my_function && my_function->cfg);
gcc_assert (cfun == my_function);
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
int this_size = estimate_num_insns (stmt, &eni_size_weights);
int this_time = estimate_num_insns (stmt, &eni_time_weights);
int prob;
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
affine_iv iv;
ssa_op_iter iter;
tree use;
" Estimated badness is %f, frequency %.2f.\n",
edge->caller->name (), edge->caller->order,
edge->call_stmt
- && (LOCATION_LOCUS (gimple_location ((const_gimple)
+ && (LOCATION_LOCUS (gimple_location ((const gimple *)
edge->call_stmt))
> BUILTINS_LOCATION)
- ? gimple_filename ((const_gimple) edge->call_stmt)
+ ? gimple_filename ((const gimple *) edge->call_stmt)
: "unknown",
edge->call_stmt
- ? gimple_lineno ((const_gimple) edge->call_stmt)
+ ? gimple_lineno ((const gimple *) edge->call_stmt)
: -1,
badness.to_double (),
edge->frequency / (double)CGRAPH_FREQ_BASE);
bool
decl_maybe_in_construction_p (tree base, tree outer_type,
- gimple call, tree function)
+ gimple *call, tree function)
{
if (outer_type)
outer_type = TYPE_MAIN_VARIANT (outer_type);
undefined anyway. */
if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
{
- gimple phi = SSA_NAME_DEF_STMT (op);
+ gimple *phi = SSA_NAME_DEF_STMT (op);
if (gimple_phi_num_args (phi) > 2)
goto done;
ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
tree ref,
- gimple stmt,
+ gimple *stmt,
tree *instance)
{
tree otr_type = NULL;
and destructor functions. */
static bool
-noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
+noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
if (is_gimple_assign (stmt))
{
in an unknown way or ERROR_MARK_NODE if type is unchanged. */
static tree
-extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
+extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
HOST_WIDE_INT *type_offset)
{
HOST_WIDE_INT offset, size, max_size;
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
- gimple stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *stmt = SSA_NAME_DEF_STMT (vdef);
struct type_change_info *tci = (struct type_change_info *) data;
tree fn;
ipa_polymorphic_call_context::get_dynamic_type (tree instance,
tree otr_object,
tree otr_type,
- gimple call)
+ gimple *call)
{
struct type_change_info tci;
ao_ref ao;
bool function_entry_reached = false;
tree instance_ref = NULL;
- gimple stmt = call;
+ gimple *stmt = call;
/* Remember OFFSET before it is modified by restrict_to_inner_class.
This is because we do not update INSTANCE when walking inwards. */
HOST_WIDE_INT instance_offset = offset;
int size = 0;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_CALL
&& !gimple_call_fndecl (stmt))
{
*/
static bool
-stmt_may_be_vtbl_ptr_store (gimple stmt)
+stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
if (is_gimple_call (stmt))
return false;
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
- gimple stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *stmt = SSA_NAME_DEF_STMT (vdef);
struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
if (stmt_may_be_vtbl_ptr_store (stmt))
type of the THIS pointer. */
static bool
-param_type_may_change_p (tree function, tree arg, gimple call)
+param_type_may_change_p (tree function, tree arg, gimple *call)
{
/* Pure functions cannot make any changes to the dynamic type;
that requires writing to memory. */
static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
- gimple stmt, tree parm_load)
+ gimple *stmt, tree parm_load)
{
struct ipa_param_aa_status *paa;
bool modified = false;
static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
vec<ipa_param_descriptor> descriptors,
- gimple stmt)
+ gimple *stmt)
{
int index;
tree op1;
static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
- int index, gimple stmt, tree ref)
+ int index, gimple *stmt, tree ref)
{
struct ipa_param_aa_status *paa;
bool modified = false;
static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
- gimple call, tree parm)
+ gimple *call, tree parm)
{
bool modified = false;
ao_ref refd;
bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
vec<ipa_param_descriptor> descriptors,
- gimple stmt, tree op, int *index_p,
+ gimple *stmt, tree op, int *index_p,
HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
bool *by_ref_p)
{
gdp = &p;
*/
- gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
+ gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
index = load_from_unmodified_param (fbi, descriptors, def);
}
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
- gcall *call, gimple stmt, tree name,
+ gcall *call, gimple *stmt, tree name,
tree param_type)
{
HOST_WIDE_INT offset, size, max_size;
RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
static tree
-get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
+get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
HOST_WIDE_INT size, max_size;
tree expr, parm, obj;
gcall *call, gphi *phi)
{
HOST_WIDE_INT offset;
- gimple assign, cond;
+ gimple *assign, *cond;
basic_block phi_bb, assign_bb, cond_bb;
tree tmp, parm, expr, obj;
int index, i;
{
while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
if (gimple_assign_single_p (def_stmt))
rhs = gimple_assign_rhs1 (def_stmt);
for (; !gsi_end_p (gsi); gsi_prev (&gsi))
{
struct ipa_known_agg_contents_list *n, **p;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
tree lhs, rhs, lhs_base;
}
else
{
- gimple stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *stmt = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (stmt))
compute_complex_assign_jump_func (fbi, info, jfunc,
call, stmt, arg, param_type);
field rather than the pfn. */
static tree
-ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
+ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
HOST_WIDE_INT *offset_p)
{
tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
}
int index;
- gimple def = SSA_NAME_DEF_STMT (target);
+ gimple *def = SSA_NAME_DEF_STMT (target);
if (gimple_assign_single_p (def)
&& ipa_load_from_parm_agg (fbi, info->descriptors, def,
gimple_assign_rhs1 (def), &index, &offset,
tree n2 = PHI_ARG_DEF (def, 1);
if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
return;
- gimple d1 = SSA_NAME_DEF_STMT (n1);
- gimple d2 = SSA_NAME_DEF_STMT (n2);
+ gimple *d1 = SSA_NAME_DEF_STMT (n1);
+ gimple *d2 = SSA_NAME_DEF_STMT (n2);
tree rec;
basic_block bb, virt_bb;
/* Third, let's see that the branching is done depending on the least
significant bit of the pfn. */
- gimple branch = last_stmt (bb);
+ gimple *branch = last_stmt (bb);
if (!branch || gimple_code (branch) != GIMPLE_COND)
return;
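
(For context, a rough C-like sketch of the method-pointer dispatch this
analysis matches; it assumes the usual Itanium C++ ABI encoding, the
names mfp, fn, and load_from_vtable are hypothetical, and the real check
inspects GIMPLE rather than C:

  /* The low bit of the pfn field distinguishes the two cases:
     set   -> virtual call, pfn - 1 is an offset into the vtable;
     clear -> direct call, pfn is the function's address.  */
  if (mfp.__pfn & 1)
    fn = load_from_vtable (obj, mfp.__pfn - 1);  /* hypothetical helper */
  else
    fn = mfp.__pfn;
)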
else
{
struct ipa_jump_func jfunc;
- gimple stmt = SSA_NAME_DEF_STMT (obj);
+ gimple *stmt = SSA_NAME_DEF_STMT (obj);
tree expr;
expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
formal parameters are called. */
static void
-ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple stmt)
+ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
if (is_gimple_call (stmt))
ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
passed in DATA. */
static bool
-visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
+visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
struct ipa_node_params *info = (struct ipa_node_params *) data;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
a load into a temporary. */
if (is_gimple_reg_type (TREE_TYPE (expr)))
{
- gimple tem = gimple_build_assign (NULL_TREE, expr);
+ gimple *tem = gimple_build_assign (NULL_TREE, expr);
if (gimple_in_ssa_p (cfun))
{
gimple_set_vuse (tem, gimple_vuse (stmt));
{
unsigned int ix;
tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
- gimple def_temp;
+ gimple *def_temp;
arg = gimple_call_arg (stmt, adj->base_index);
if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
struct ipa_agg_replacement_value *v;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree rhs, val, t;
HOST_WIDE_INT offset, size;
int index;
tree ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *, HOST_WIDE_INT,
bool);
bool ipa_load_from_parm_agg (struct ipa_func_body_info *,
- vec<ipa_param_descriptor>, gimple, tree, int *,
+ vec<ipa_param_descriptor>, gimple *, tree, int *,
HOST_WIDE_INT *, HOST_WIDE_INT *, bool *);
/* Debugging interface. */
/* Wrapper around check_decl for loads in local mode. */
static bool
-check_load (gimple, tree op, tree, void *data)
+check_load (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, false);
/* Wrapper around check_decl for stores in local mode. */
static bool
-check_store (gimple, tree op, tree, void *data)
+check_store (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, false);
/* Wrapper around check_decl for loads in ipa mode. */
static bool
-check_ipa_load (gimple, tree op, tree, void *data)
+check_ipa_load (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, true);
/* Wrapper around check_decl for stores in ipa mode. */
static bool
-check_ipa_store (gimple, tree op, tree, void *data)
+check_ipa_store (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, true);
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
- gimple stmt = gsi_stmt (*gsip);
+ gimple *stmt = gsi_stmt (*gsip);
if (is_gimple_debug (stmt))
return;
symtab_node *referring;
symtab_node *referred;
- gimple stmt;
+ gimple *stmt;
unsigned int lto_stmt_uid;
unsigned int referred_index;
ENUM_BITFIELD (ipa_ref_use) use:3;
variable, check whether it is present in the bitmap passed via DATA. */
static bool
-test_nonssa_use (gimple, tree t, tree, void *data)
+test_nonssa_use (gimple *, tree t, tree, void *data)
{
t = get_base_address (t);
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (is_gimple_debug (stmt))
continue;
if (walk_stmt_load_store_addr_ops
to optimize away an unused function call. */
static void
-check_forbidden_calls (gimple stmt)
+check_forbidden_calls (gimple *stmt)
{
imm_use_iterator use_iter;
use_operand_p use_p;
e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) == GIMPLE_LABEL
|| is_gimple_debug (stmt)
|| gimple_clobber_p (stmt))
Return true when access to T prevents splitting the function. */
static bool
-mark_nonssa_use (gimple, tree t, tree, void *data)
+mark_nonssa_use (gimple *, tree t, tree, void *data)
{
t = get_base_address (t);
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
tree op;
ssa_op_iter iter;
tree decl;
tree retval = NULL, real_retval = NULL, retbnd = NULL;
bool split_part_return_p = false;
bool with_bounds = chkp_function_instrumented_p (current_function_decl);
- gimple last_stmt = NULL;
+ gimple *last_stmt = NULL;
unsigned int i;
tree arg, ddef;
vec<tree, va_gc> **debug_args = NULL;
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_vuse (stmt))
{
gimple_set_vuse (stmt, NULL_TREE);
&& is_gimple_reg (parm))
{
tree ddecl;
- gimple def_temp;
+ gimple *def_temp;
/* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
otherwise if it didn't exist before, we'd end up with
unsigned int i;
tree var, vexpr;
gimple_stmt_iterator cgsi;
- gimple def_temp;
+ gimple *def_temp;
push_cfun (DECL_STRUCT_FUNCTION (node->decl));
var = BLOCK_VARS (DECL_INITIAL (node->decl));
gsi_insert_after (&gsi, call, GSI_NEW_STMT);
if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
{
- gimple cpy;
+ gimple *cpy;
tree tem = create_tmp_reg (restype);
tem = make_ssa_name (tem, call);
cpy = gimple_build_assign (retval, NOP_EXPR, tem);
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
int this_time, this_size;
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
this_size = estimate_num_insns (stmt, &eni_size_weights);
this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
bool type_in_anonymous_namespace_p (const_tree);
bool type_with_linkage_p (const_tree);
bool odr_type_p (const_tree);
-bool possible_polymorphic_call_target_p (tree ref, gimple stmt, struct cgraph_node *n);
+bool possible_polymorphic_call_target_p (tree ref, gimple *stmt, struct cgraph_node *n);
void dump_possible_polymorphic_call_targets (FILE *, tree, HOST_WIDE_INT,
const ipa_polymorphic_call_context &);
bool possible_polymorphic_call_target_p (tree, HOST_WIDE_INT,
const ipa_polymorphic_call_context &,
struct cgraph_node *);
tree inlined_polymorphic_ctor_dtor_block_p (tree, bool);
-bool decl_maybe_in_construction_p (tree, tree, gimple, tree);
+bool decl_maybe_in_construction_p (tree, tree, gimple *, tree);
tree vtable_pointer_value_to_binfo (const_tree);
bool vtable_pointer_value_to_vtable (const_tree, tree *, unsigned HOST_WIDE_INT *);
tree subbinfo_with_vtable_at_offset (tree, unsigned HOST_WIDE_INT, tree);
inline vec <cgraph_node *>
possible_polymorphic_call_targets (tree ref,
- gimple call,
+ gimple *call,
bool *completep = NULL,
void **cache_token = NULL)
{
so they point to STMTS. */
static void
-fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple *stmts,
+fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
struct function *fn)
{
struct cgraph_edge *cedge;
/* Fixup call_stmt pointers in NODE and all clones. */
static void
-fixup_call_stmt_edges (struct cgraph_node *orig, gimple *stmts)
+fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
struct cgraph_node *node;
struct function *fn;
{
struct function *fn;
enum LTO_tags tag;
- gimple *stmts;
+ gimple **stmts;
basic_block bb;
struct cgraph_node *node;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
- stmts = (gimple *) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple));
+ stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
FOR_ALL_BB_FN (bb, cfun)
{
gimple_stmt_iterator bsi = gsi_start_phis (bb);
while (!gsi_end_p (bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gsi_next (&bsi);
stmts[gimple_uid (stmt)] = stmt;
}
bsi = gsi_start_bb (bb);
while (!gsi_end_p (bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
/* If we're recompiling LTO objects with debug stmts but
we're not supposed to have debug stmts, remove them now.
We can't remove them earlier because this would cause uid
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
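
(One detail worth noting in the hunk above: a uid-indexed array of
statements now holds gimple * elements, so the array variable itself
becomes gimple **. A minimal sketch, with fn a hypothetical
struct function *:

  /* One gimple* slot per statement uid; the caller frees it.  */
  gimple **stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn),
                                        sizeof (gimple *));
  stmts[gimple_uid (stmt)] = stmt;
  free (stmts);
)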
/* The tree of contexts corresponding to the encountered constructs. */
struct omp_context *outer;
- gimple stmt;
+ gimple *stmt;
/* Map variables to fields in a structure that allows communication
between sending and receiving threads. */
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
struct omp_for_data fd;
- gimple ws_stmt = last_stmt (ws_entry_bb);
+ gimple *ws_stmt = last_stmt (ws_entry_bb);
if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
return true;
expanded. */
static vec<tree, va_gc> *
-get_ws_args_for (gimple par_stmt, gimple ws_stmt)
+get_ws_args_for (gimple *par_stmt, gimple *ws_stmt)
{
tree t;
location_t loc = gimple_location (ws_stmt);
|| (last_and_only_stmt (ws_entry_bb)
&& last_and_only_stmt (par_exit_bb))))
{
- gimple par_stmt = last_stmt (par_entry_bb);
- gimple ws_stmt = last_stmt (ws_entry_bb);
+ gimple *par_stmt = last_stmt (par_entry_bb);
+ gimple *ws_stmt = last_stmt (ws_entry_bb);
if (region->inner->type == GIMPLE_OMP_FOR)
{
/* Create a new context, with OUTER_CTX being the surrounding context. */
static omp_context *
-new_omp_context (gimple stmt, omp_context *outer_ctx)
+new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
omp_context *ctx = XCNEW (omp_context);
bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
switch (gimple_code (stmt))
find_combined_for, NULL, &wi);
if (wi.info)
{
- gomp_for *for_stmt = as_a <gomp_for *> ((gimple) wi.info);
+ gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
struct omp_for_data fd;
extract_omp_for_data (for_stmt, &fd, NULL);
/* We need two temporaries with fd.loop.v type (istart/iend)
}
static bool
-oacc_loop_or_target_p (gimple stmt)
+oacc_loop_or_target_p (gimple *stmt)
{
enum gimple_code outer_type = gimple_code (stmt);
return ((outer_type == GIMPLE_OMP_TARGET
/* Check nesting restrictions. */
static bool
-check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
+check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
/* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
inside an OpenACC CTX. */
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
omp_context *ctx = (omp_context *) wi->info;
if (gimple_has_location (stmt))
/* Build a call to GOMP_barrier. */
-static gimple
+static gimple *
build_omp_barrier (tree lhs)
{
tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
/* If a context was created for STMT when it was scanned, return it. */
static omp_context *
-maybe_lookup_ctx (gimple stmt)
+maybe_lookup_ctx (gimple *stmt)
{
splay_tree_node n;
n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
{
tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
- gimple tseq;
+ gimple *tseq;
x = build_outer_var_ref (var, ctx);
if (is_reference (var)
/* Don't want uninit warnings on simduid, it is always uninitialized,
but we use it not for the value, but for the DECL_UID only. */
TREE_NO_WARNING (uid) = 1;
- gimple g
+ gimple *g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
gimple_call_set_lhs (g, lane);
gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
tree t = NULL_TREE, array, x;
tree type = get_base_type (var);
- gimple stmt;
+ gimple *stmt;
/* Now insert the partial reductions into the array. */
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
gimple_seq sub_seq = NULL;
- gimple stmt;
+ gimple *stmt;
tree x, c, tid = NULL_TREE;
int count = 0;
/* Get the current thread id. */
tree call = builtin_decl_explicit (BUILT_IN_GOACC_GET_THREAD_NUM);
tid = create_tmp_var (TREE_TYPE (TREE_TYPE (call)));
- gimple stmt = gimple_build_call (call, 0);
+ gimple *stmt = gimple_build_call (call, 0);
gimple_call_set_lhs (stmt, tid);
gimple_seq_add_stmt (stmt_seqp, stmt);
}
{
tree t, t1, t2, val, cond, c, clauses, flags;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
enum built_in_function start_ix;
int start_ix2;
location_t clause_loc;
static gimple_seq
maybe_catch_exception (gimple_seq body)
{
- gimple g;
+ gimple *g;
tree decl;
if (!flag_exceptions)
basic_block exit_bb;
edge_iterator ei;
edge e;
- gimple stmt;
+ gimple *stmt;
int any_addressable_vars = -1;
exit_bb = region->exit;
scheduling point. */
static void
-optimize_omp_library_calls (gimple entry_stmt)
+optimize_omp_library_calls (gimple *entry_stmt)
{
basic_block bb;
gimple_stmt_iterator gsi;
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple call = gsi_stmt (gsi);
+ gimple *call = gsi_stmt (gsi);
tree decl;
if (is_gimple_call (call)
bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
true, GSI_SAME_STMT);
- gimple stmt = gimple_build_assign (to, from);
+ gimple *stmt = gimple_build_assign (to, from);
gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
|| walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
struct function *child_cfun;
tree child_fn, block, t;
gimple_stmt_iterator gsi;
- gimple entry_stmt, stmt;
+ gimple *entry_stmt, *stmt;
edge e;
vec<tree, va_gc> *ws_args;
= single_succ_p (entry_bb) ? single_succ (entry_bb)
: FALLTHRU_EDGE (entry_bb)->dest;
tree arg;
- gimple parcopy_stmt = NULL;
+ gimple *parcopy_stmt = NULL;
for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
{
- gimple stmt;
+ gimple *stmt;
gcc_assert (!gsi_end_p (gsi));
stmt = gsi_stmt (gsi);
static void
expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
- tree *counts, gimple inner_stmt, tree startvar)
+ tree *counts, gimple *inner_stmt, tree startvar)
{
int i;
if (gimple_omp_for_combined_p (fd->for_stmt))
gimple_stmt_iterator gsi;
edge e;
tree t;
- gimple stmt;
+ gimple *stmt;
last_bb = cont_bb;
for (i = fd->collapse - 1; i >= 0; i--)
struct omp_for_data *fd,
enum built_in_function start_fn,
enum built_in_function next_fn,
- gimple inner_stmt)
+ gimple *inner_stmt)
{
tree type, istart0, iend0, iend;
tree t, vmain, vback, bias = NULL_TREE;
phis = phi_nodes (l3_bb);
for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
PHI_ARG_DEF_FROM_EDGE (phi, e));
}
static void
expand_omp_for_static_nochunk (struct omp_region *region,
struct omp_for_data *fd,
- gimple inner_stmt)
+ gimple *inner_stmt)
{
tree n, q, s0, e0, e, t, tt, nthreads, threadid;
tree type, itype, vmain, vback;
static void
expand_omp_for_static_chunk (struct omp_region *region,
- struct omp_for_data *fd, gimple inner_stmt)
+ struct omp_for_data *fd, gimple *inner_stmt)
{
tree n, s0, e0, e, t;
tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
tree n1 = low_val;
tree n2 = high_val;
- gimple stmt = gimple_build_assign (ind_var, n1);
+ gimple *stmt = gimple_build_assign (ind_var, n1);
/* Replace the GIMPLE_OMP_FOR statement. */
gsi_replace (&gsi, stmt, true);
tree type, t;
basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
gcond *cond_stmt;
bool broken_loop = region->cont == NULL;
edge e, ne;
/* Expand the OMP loop defined by REGION. */
static void
-expand_omp_for (struct omp_region *region, gimple inner_stmt)
+expand_omp_for (struct omp_region *region, gimple *inner_stmt)
{
struct omp_for_data fd;
struct omp_for_data_loop *loops;
basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
gimple_stmt_iterator si, switch_si;
gomp_sections *sections_stmt;
- gimple stmt;
+ gimple *stmt;
gomp_continue *cont;
edge_iterator ei;
edge e;
gimple_stmt_iterator gsi;
basic_block store_bb;
location_t loc;
- gimple stmt;
+ gimple *stmt;
tree decl, call, type, itype;
gsi = gsi_last_bb (load_bb);
gimple_stmt_iterator gsi;
basic_block store_bb = single_succ (load_bb);
location_t loc;
- gimple stmt;
+ gimple *stmt;
tree decl, call, type, itype;
machine_mode imode;
bool exchange;
tree lhs, rhs;
basic_block store_bb = single_succ (load_bb);
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
location_t loc;
enum tree_code code;
bool need_old, need_new;
tree type, itype, cmpxchg, iaddr;
gimple_stmt_iterator si;
basic_block loop_header = single_succ (load_bb);
- gimple phi, stmt;
+ gimple *phi, *stmt;
edge e;
enum built_in_function fncode;
tree child_fn, block, t;
gimple_stmt_iterator gsi;
gomp_target *entry_stmt;
- gimple stmt;
+ gimple *stmt;
edge e;
bool offloaded, data_region;
basic_block entry_succ_bb = single_succ (entry_bb);
gimple_stmt_iterator gsi;
tree arg;
- gimple tgtcopy_stmt = NULL;
+ gimple *tgtcopy_stmt = NULL;
tree sender = TREE_VEC_ELT (data_arg, 0);
for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
}
- gimple g;
+ gimple *g;
/* The maximum number used by any start_ix, without varargs. */
auto_vec<tree, 11> args;
args.quick_push (device);
while (region)
{
location_t saved_location;
- gimple inner_stmt = NULL;
+ gimple *inner_stmt = NULL;
/* First, determine whether this is a combined parallel+workshare
region. */
bool single_tree)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
basic_block son;
gsi = gsi_last_bb (bb);
static void
oacc_gimple_assign (tree dest, tree_code op, tree src, gimple_seq *seq)
{
- gimple stmt;
+ gimple *stmt;
if (TREE_CODE (TREE_TYPE (dest)) != COMPLEX_TYPE)
{
gimple_seq *stmt_seqp, omp_context *ctx)
{
tree c, t, oc;
- gimple stmt;
+ gimple *stmt;
omp_context *octx;
/* Find the innermost OpenACC parallel context. */
gimple_seq *stmt_seqp, omp_context *ctx)
{
tree c, x, var, array, loop_header, loop_body, loop_exit, type;
- gimple stmt;
+ gimple *stmt;
/* Create for loop.
gsi = gsi_start (*body);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
{
inner = gimple_bind_body (bind_stmt);
enter, exit;
bool reduction_found = false;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
{
- gimple omp_return = gimple_seq_last_stmt (*body);
+ gimple *omp_return = gimple_seq_last_stmt (*body);
gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
if (gimple_omp_return_nowait_p (omp_return))
return;
tree lhs = create_tmp_var (c_bool_type);
gimple_omp_return_set_lhs (omp_return, lhs);
tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple g = gimple_build_cond (NE_EXPR, lhs,
+ gimple *g = gimple_build_cond (NE_EXPR, lhs,
fold_convert (c_bool_type,
boolean_false_node),
ctx->outer->cancel_label, fallthru_label);
tree block, control;
gimple_stmt_iterator tgsi;
gomp_sections *stmt;
- gimple t;
+ gimple *t;
gbind *new_stmt, *bind;
gimple_seq ilist, dlist, olist, new_body;
for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
{
omp_context *sctx;
- gimple sec_start;
+ gimple *sec_start;
sec_start = gsi_stmt (tgsi);
sctx = maybe_lookup_ctx (sec_start);
location_t loc = gimple_location (single_stmt);
tree tlabel = create_artificial_label (loc);
tree flabel = create_artificial_label (loc);
- gimple call, cond;
+ gimple *call, *cond;
tree lhs, decl;
decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block;
- gimple t;
+ gimple *t;
gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
gbind *bind;
gimple_seq bind_body, bind_body_tail = NULL, dlist;
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block, lab = NULL, x, bfn_decl;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gbind *bind;
location_t loc = gimple_location (stmt);
gimple_seq tseq;
static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gcall *x;
gbind *bind;
tree block = make_node (BLOCK);
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gcall *x;
gbind *bind;
struct walk_stmt_info *wi)
{
int *info = (int *) wi->info;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
switch (gimple_code (stmt))
}
static void
-lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
+lower_depend_clauses (gimple *stmt, gimple_seq *iseq, gimple_seq *oseq)
{
tree c, clauses;
- gimple g;
+ gimple *g;
size_t n_in = 0, n_out = 0, idx = 2, i;
clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
{
tree clauses;
tree child_fn, t;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gbind *par_bind, *bind, *dep_bind = NULL;
gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
location_t loc = gimple_location (stmt);
location_t loc = gimple_location (teams_stmt);
tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
- gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
+ gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
gimple_set_location (call, loc);
gimple_seq_add_stmt (&bind_body, call);
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
struct walk_stmt_info wi;
gcall *call_stmt;
gimple_call_set_lhs (call_stmt, lhs);
tree fallthru_label;
fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple g;
+ gimple *g;
g = gimple_build_label (fallthru_label);
gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
g = gimple_build_cond (NE_EXPR, lhs,
static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
- gimple branch_ctx, gimple label_ctx)
+ gimple *branch_ctx, gimple *label_ctx)
{
gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple context = (gimple) wi->info;
- gimple inner_context;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *context = (gimple *) wi->info;
+ gimple *inner_context;
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple context = (gimple) wi->info;
+ gimple *context = (gimple *) wi->info;
splay_tree_node n;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
n = splay_tree_lookup (all_labels,
(splay_tree_key) lab);
diagnose_sb_0 (gsi_p, context,
- n ? (gimple) n->value : NULL);
+ n ? (gimple *) n->value : NULL);
}
lab = gimple_cond_false_label (cond_stmt);
if (lab)
n = splay_tree_lookup (all_labels,
(splay_tree_key) lab);
diagnose_sb_0 (gsi_p, context,
- n ? (gimple) n->value : NULL);
+ n ? (gimple *) n->value : NULL);
}
}
break;
break;
n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
- diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
+ diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
}
break;
{
tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
- if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
+ if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
break;
}
}
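The casts here follow from the typedef change: libiberty splay trees store keys and values as plain integral types, so statement pointers have to be cast in on insert and back out on lookup. A sketch of the round trip using the splay-tree.h API seen above (lab and stmt stand for the label and context statement being recorded):

  splay_tree_insert (all_labels, (splay_tree_key) lab,
                     (splay_tree_value) stmt);
  splay_tree_node n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
  gimple *s = n ? (gimple *) n->value : NULL;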
make_gimple_omp_edges (basic_block bb, struct omp_region **region,
int *region_idx)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
enum gimple_code code = gimple_code (last);
struct omp_region *cur_region = *region;
bool fallthru = false;
struct modify_stmt_info {
ipa_parm_adjustment_vec adjustments;
- gimple stmt;
+ gimple *stmt;
/* True if the parent statement was modified by
ipa_simd_modify_stmt_ops. */
bool modified;
if (tp != orig_tp)
{
repl = build_fold_addr_expr (repl);
- gimple stmt;
+ gimple *stmt;
if (is_gimple_debug (info->stmt))
{
tree vexpr = make_node (DEBUG_EXPR_DECL);
set_ssa_default_def (cfun, adjustments[j].base, NULL_TREE);
SET_SSA_NAME_VAR_OR_IDENTIFIER (name, base_var);
SSA_NAME_IS_DEFAULT_DEF (name) = 0;
- gimple stmt = gimple_build_assign (name, new_decl);
+ gimple *stmt = gimple_build_assign (name, new_decl);
gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
}
else
gsi = gsi_start_bb (bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
info.stmt = stmt;
struct walk_stmt_info wi;
edge e = make_edge (incr_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
e->probability = REG_BR_PROB_BASE;
gsi = gsi_last_bb (incr_bb);
- gimple g = gimple_build_assign (iter2, PLUS_EXPR, iter1,
+ gimple *g = gimple_build_assign (iter2, PLUS_EXPR, iter1,
build_int_cst (unsigned_type_node, 1));
gsi_insert_after (&gsi, g, GSI_CONTINUE_LINKING);
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
tree repl = gimple_get_lhs (g);
FOR_EACH_IMM_USE_STMT (use_stmt, iter, def)
if (is_gimple_debug (use_stmt) || use_stmt == call)
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (TREE_ADDRESSABLE (orig_arg))
{
gsi = gsi_after_labels (body_bb);
static void
execute_ipa_stmt_fixups (opt_pass *pass,
- struct cgraph_node *node, gimple *stmts)
+ struct cgraph_node *node, gimple **stmts)
{
while (pass)
{
/* Execute stmt fixup hooks of all IPA passes for NODE and STMTS. */
void
-execute_all_ipa_stmt_fixups (struct cgraph_node *node, gimple *stmts)
+execute_all_ipa_stmt_fixups (struct cgraph_node *node, gimple **stmts)
{
pass_manager *passes = g->get_passes ();
execute_ipa_stmt_fixups (passes->all_regular_ipa_passes, node, stmts);
static bool
expr_coherent_p (tree t1, tree t2)
{
- gimple stmt;
+ gimple *stmt;
tree ssa_name_1 = NULL;
tree ssa_name_2 = NULL;
enum tree_code loop_bound_code,
int loop_bound_step)
{
- gimple stmt;
+ gimple *stmt;
tree compare_var, compare_base;
enum tree_code compare_code;
tree compare_step_var;
{
unsigned i;
bool check_value_one;
- gimple lhs_def_stmt;
+ gimple *lhs_def_stmt;
gphi *phi_stmt;
tree cmp_rhs, cmp_lhs;
- gimple last;
+ gimple *last;
gcond *cmp_stmt;
last = last_stmt (exit_edge->src);
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
tree op1, bitmap visited, enum br_predictor *predictor)
{
- gimple def;
+ gimple *def;
if (predictor)
*predictor = PRED_UNCONDITIONAL;
static void
tree_predict_by_opcode (basic_block bb)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
edge then_edge;
tree op0, op1;
tree type;
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
{
- gimple last = last_stmt (e->src);
+ gimple *last = last_stmt (e->src);
if (last
&& gimple_code (last) == GIMPLE_RETURN)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl;
if (is_gimple_call (stmt))
{
edge e;
edge_iterator ei;
- gimple last;
+ gimple *last;
FOR_EACH_EDGE (e, ei, bb->succs)
{
for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
gsi_next (&bi))
{
- gimple stmt = gsi_stmt (bi);
+ gimple *stmt = gsi_stmt (bi);
if (is_gimple_call (stmt)
/* Constant and pure calls are hardly used to signal
something exceptional. */
pass_strip_predict_hints::execute (function *fun)
{
basic_block bb;
- gimple ass_stmt;
+ gimple *ass_stmt;
tree var;
FOR_EACH_BB_FN (bb, fun)
gimple_stmt_iterator bi;
for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
{
- gimple stmt = gsi_stmt (bi);
+ gimple *stmt = gsi_stmt (bi);
if (gimple_code (stmt) == GIMPLE_PREDICT)
{
for (i = 0; i < values.length (); i++)
{
histogram_value hist = values[i];
- gimple stmt = hist->hvalue.stmt;
+ gimple *stmt = hist->hvalue.stmt;
t = (int) hist->type;
FOR_EACH_EDGE (e, ei, bb->succs)
{
gimple_stmt_iterator gsi;
- gimple last = NULL;
+ gimple *last = NULL;
/* It may happen that there are compiler generated statements
without a locus at all. Go through the basic block from the
last to the first statement looking for a locus. */
if (have_exit_edge || need_exit_edge)
{
gimple_stmt_iterator gsi;
- gimple first;
+ gimple *first;
gsi = gsi_start_nondebug_after_labels_bb (bb);
gcc_checking_assert (!gsi_end_p (gsi));
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_has_location (stmt))
output_location (gimple_filename (stmt), gimple_lineno (stmt),
&offset, bb);
{
if (TREE_CODE (t) == SSA_NAME)
{
- gimple g = SSA_NAME_DEF_STMT (t);
+ gimple *g = SSA_NAME_DEF_STMT (t);
if (gimple_assign_single_p (g))
return gimple_assign_rhs1 (g);
}
{
/* This map maps a pointer (the first argument of UBSAN_NULL) to
a vector of UBSAN_NULL call statements that check this pointer. */
- hash_map<tree, auto_vec<gimple> > null_check_map;
+ hash_map<tree, auto_vec<gimple *> > null_check_map;
/* This map maps a pointer (the second argument of ASAN_CHECK) to
a vector of ASAN_CHECK call statements that check the access. */
- hash_map<tree_operand_hash, auto_vec<gimple> > asan_check_map;
+ hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map;
/* This map maps a tree triplet (the first, second and fourth argument
of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
that virtual table pointer. */
- hash_map<sanopt_tree_triplet_hash, auto_vec<gimple> > vptr_check_map;
+ hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map;
/* Number of IFN_ASAN_CHECK statements. */
int asan_num_accesses;
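All three maps share one shape: a checked entity keyed to the stack of checking statements already seen on the walk down the dominator tree, so that a later check dominated by a recorded one can be dropped. A minimal self-contained sketch of that shape, with standard containers standing in for GCC's hash_map and auto_vec and a hypothetical dominates_p query:

  #include <map>
  #include <vector>

  struct stmt;                           /* stand-in for gimple */
  bool dominates_p (stmt *a, stmt *b);   /* hypothetical dominance query */

  typedef std::map<const void *, std::vector<stmt *> > check_map;

  /* A new check of PTR is redundant when an earlier recorded check
     of the same pointer still dominates it.  */
  bool
  redundant_check_p (check_map &checks, const void *ptr, stmt *s)
  {
    std::vector<stmt *> &v = checks[ptr];
    return !v.empty () && dominates_p (v.back (), s);
  }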
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
{
/* Get the first dominating check from the list of stored checks.
Non-dominating checks are silently dropped. */
-static gimple
-maybe_get_dominating_check (auto_vec<gimple> &v)
+static gimple *
+maybe_get_dominating_check (auto_vec<gimple *> &v)
{
for (; !v.is_empty (); v.pop ())
{
- gimple g = v.last ();
+ gimple *g = v.last ();
sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
if (!si->visited_p)
/* At this point we shouldn't have any statements
that aren't dominating the current BB. */
/* Optimize away redundant UBSAN_NULL calls. */
static bool
-maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
+maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple *stmt)
{
gcc_assert (gimple_call_num_args (stmt) == 3);
tree ptr = gimple_call_arg (stmt, 0);
gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
bool remove = false;
- auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
- gimple g = maybe_get_dominating_check (v);
+ auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr);
+ gimple *g = maybe_get_dominating_check (v);
if (!g)
{
/* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
nothing to optimize yet. */
when we can actually optimize. */
static bool
-maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
+maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple *stmt)
{
gcc_assert (gimple_call_num_args (stmt) == 5);
sanopt_tree_triplet triplet;
triplet.t2 = gimple_call_arg (stmt, 1);
triplet.t3 = gimple_call_arg (stmt, 3);
- auto_vec<gimple> &v = ctx->vptr_check_map.get_or_insert (triplet);
- gimple g = maybe_get_dominating_check (v);
+ auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet);
+ gimple *g = maybe_get_dominating_check (v);
if (!g)
{
/* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
nothing to optimize yet. */
if preceded by checks in V. */
static bool
-can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
+can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb)
{
unsigned int i;
- gimple g;
- gimple to_pop = NULL;
+ gimple *g;
+ gimple *to_pop = NULL;
bool remove = false;
basic_block last_bb = bb;
bool cleanup = false;
/* Optimize away redundant ASAN_CHECK calls. */
static bool
-maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
+maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple *stmt)
{
gcc_assert (gimple_call_num_args (stmt) == 4);
tree ptr = gimple_call_arg (stmt, 1);
gimple_set_uid (stmt, info->freeing_call_events);
- auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);
+ auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);
tree base_addr = maybe_get_single_definition (ptr);
- auto_vec<gimple> *base_checks = NULL;
+ auto_vec<gimple *> *base_checks = NULL;
if (base_addr)
{
base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
ptr_checks = ctx->asan_check_map.get (ptr);
}
- gimple g = maybe_get_dominating_check (*ptr_checks);
- gimple g2 = NULL;
+ gimple *g = maybe_get_dominating_check (*ptr_checks);
+ gimple *g2 = NULL;
if (base_checks)
/* Try with base address as well. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool remove = false;
if (!is_gimple_call (stmt))
FOR_EACH_BB_FN (bb, fun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && gimple_call_internal_p (stmt)
&& gimple_call_internal_fn (stmt) == IFN_ASAN_CHECK)
++asan_num_accesses;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool no_next = false;
if (!is_gimple_call (stmt))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (is_gimple_debug (stmt))
continue;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (!is_gimple_debug (stmt))
continue;
is set when the code generation cannot continue. */
static bool
-rename_uses (gimple copy, rename_map_type *rename_map,
+rename_uses (gimple *copy, rename_map_type *rename_map,
gimple_stmt_iterator *gsi_tgt,
sese region, loop_p loop, vec<tree> iv_map,
bool *gloog_error)
{
def_operand_p def_p;
ssa_op_iter op_iter;
- gimple stmt = gsi_stmt (gsi);
- gimple copy;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *copy;
tree lhs;
/* Do not copy labels or conditions. */
sese region = if_region->region;
edge entry = region->entry;
basic_block bb = entry->dest;
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
gimple_stmt_iterator gsi = gsi_last_bb (bb);
gcond *cond_stmt;
if (!defined_in_sese_p (t, region))
return true;
- gimple stmt = SSA_NAME_DEF_STMT (t);
+ gimple *stmt = SSA_NAME_DEF_STMT (t);
if (gimple_code (stmt) == GIMPLE_PHI
|| gimple_code (stmt) == GIMPLE_CALL)
tree
scalar_evolution_in_region (sese region, loop_p loop, tree t)
{
- gimple def;
+ gimple *def;
struct loop *def_loop;
basic_block before = block_before_sese (region);
/* Returns true when STMT is defined in REGION. */
static inline bool
-stmt_in_sese_p (gimple stmt, sese region)
+stmt_in_sese_p (gimple *stmt, sese region)
{
basic_block bb = gimple_bb (stmt);
return bb && bb_in_sese_p (bb, region);
static inline bool
defined_in_sese_p (tree name, sese region)
{
- gimple stmt = SSA_NAME_DEF_STMT (name);
+ gimple *stmt = SSA_NAME_DEF_STMT (name);
return stmt_in_sese_p (stmt, region);
}
corresponding element in CONDITION_CASES is not NULL_TREE. For a
SWITCH_EXPR the corresponding element in CONDITION_CASES is a
CASE_LABEL_EXPR. */
- vec<gimple> conditions;
- vec<gimple> condition_cases;
+ vec<gimple *> conditions;
+ vec<gimple *> condition_cases;
vec<data_reference_p> data_refs;
} *gimple_bb_p;
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
- use_operand_p *use_p, gimple *stmt);
+ use_operand_p *use_p, gimple **stmt);
enum ssa_op_iter_type {
unsigned i;
unsigned numops;
use_optype_p uses;
- gimple stmt;
+ gimple *stmt;
};
/* NOTE: Keep these in sync with doc/tree-ssa.texi. */
/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
in STMT. */
static inline void
-link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
+link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple *stmt)
{
if (stmt)
link_imm_use (linknode, def);
in STMT. */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
- gimple stmt)
+ gimple *stmt)
{
if (stmt)
relink_imm_use (linknode, old);
/* If VAR has only a single immediate nondebug use, return true, and
set USE_P and STMT to the use pointer and stmt of occurrence. */
static inline bool
-single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
+single_imm_use (const_tree var, use_operand_p *use_p, gimple **stmt)
{
const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
/* Initialize the iterator PTR to the virtual defs in STMT. */
static inline void
-op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init (ssa_op_iter *ptr, gimple *stmt, int flags)
{
/* PHI nodes require a different iterator initialization path. We
do not support iterating over virtual defs or uses without
/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
the first use. */
static inline use_operand_p
-op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init_use (ssa_op_iter *ptr, gimple *stmt, int flags)
{
gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
&& (flags & SSA_OP_USE));
/* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
the first def. */
static inline def_operand_p
-op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init_def (ssa_op_iter *ptr, gimple *stmt, int flags)
{
gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
&& (flags & SSA_OP_DEF));
/* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
the first operand as a tree. */
static inline tree
-op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init_tree (ssa_op_iter *ptr, gimple *stmt, int flags)
{
op_iter_init (ptr, stmt, flags);
ptr->iter_type = ssa_op_iter_tree;
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
static inline tree
-single_ssa_tree_operand (gimple stmt, int flags)
+single_ssa_tree_operand (gimple *stmt, int flags)
{
tree var;
ssa_op_iter iter;
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
static inline use_operand_p
-single_ssa_use_operand (gimple stmt, int flags)
+single_ssa_use_operand (gimple *stmt, int flags)
{
use_operand_p var;
ssa_op_iter iter;
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
static inline def_operand_p
-single_ssa_def_operand (gimple stmt, int flags)
+single_ssa_def_operand (gimple *stmt, int flags)
{
def_operand_p var;
ssa_op_iter iter;
/* Return true if there are zero operands in STMT matching the type
given in FLAGS. */
static inline bool
-zero_ssa_operands (gimple stmt, int flags)
+zero_ssa_operands (gimple *stmt, int flags)
{
ssa_op_iter iter;
/* Return the number of operands matching FLAGS in STMT. */
static inline int
-num_ssa_operands (gimple stmt, int flags)
+num_ssa_operands (gimple *stmt, int flags)
{
ssa_op_iter iter;
tree t;
{
use_operand_p use_p;
use_operand_p last_p = head;
- gimple head_stmt = USE_STMT (head);
+ gimple *head_stmt = USE_STMT (head);
tree use = USE_FROM_PTR (head);
ssa_op_iter op_iter;
int flag;
}
/* Initialize IMM to traverse over uses of VAR. Return the first statement. */
-static inline gimple
+static inline gimple *
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
/* Bump IMM to the next stmt which has a use of var. */
-static inline gimple
+static inline gimple *
next_imm_use_stmt (imm_use_iterator *imm)
{
imm->imm_use = imm->iter_node.next;
/* Delink all immediate_use information for STMT. */
static inline void
-delink_stmt_imm_use (gimple stmt)
+delink_stmt_imm_use (gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use_p;
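These inline helpers back the FOR_EACH_IMM_USE_STMT and FOR_EACH_IMM_USE_ON_STMT macros that appear throughout this patch. The typical client pattern, as it reads after the change (name and replacement stand for an SSA name and the value being propagated into its uses):

  imm_use_iterator iter;
  use_operand_p use_p;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
      SET_USE (use_p, replacement);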
ipa_ref *
symtab_node::create_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type, gimple stmt)
+ enum ipa_ref_use use_type, gimple *stmt)
{
ipa_ref *ref = NULL, *ref2 = NULL;
ipa_ref_list *list, *list2;
ipa_ref *
symtab_node::maybe_create_reference (tree val, enum ipa_ref_use use_type,
- gimple stmt)
+ gimple *stmt)
{
STRIP_NOPS (val);
if (TREE_CODE (val) != ADDR_EXPR)
/* Clone reference REF to this symtab_node and set its stmt to STMT. */
ipa_ref *
-symtab_node::clone_reference (ipa_ref *ref, gimple stmt)
+symtab_node::clone_reference (ipa_ref *ref, gimple *stmt)
{
bool speculative = ref->speculative;
unsigned int stmt_uid = ref->lto_stmt_uid;
ipa_ref *
symtab_node::find_reference (symtab_node *referred_node,
- gimple stmt, unsigned int lto_stmt_uid)
+ gimple *stmt, unsigned int lto_stmt_uid)
{
ipa_ref *r = NULL;
int i;
/* Remove all references that are associated with statement STMT. */
void
-symtab_node::remove_stmt_references (gimple stmt)
+symtab_node::remove_stmt_references (gimple *stmt)
{
ipa_ref *r = NULL;
int i = 0;
#define CONST_CAST_RTX(X) CONST_CAST (struct rtx_def *, (X))
#define CONST_CAST_RTX_INSN(X) CONST_CAST (struct rtx_insn *, (X))
#define CONST_CAST_BB(X) CONST_CAST (struct basic_block_def *, (X))
-#define CONST_CAST_GIMPLE(X) CONST_CAST (struct gimple_statement_base *, (X))
+#define CONST_CAST_GIMPLE(X) CONST_CAST (gimple *, (X))
/* Activate certain diagnostics as warnings (not errors via the
-Werror flag). */
"Perform architecture specific checking of statements gimplified\
from @code{VA_ARG_EXPR}. @var{stmt} is the statement. Returns true if\
the statement doesn't need to be checked for @code{va_list} references.",
- bool, (struct stdarg_info *ai, const_gimple stmt), NULL)
+ bool, (struct stdarg_info *ai, const gimple *stmt), NULL)
/* This target hook allows the operating system to override the DECL
that represents the external variable that contains the stack
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
/* We are only interested in an assignment with a single
rhs operand because if it is not, the original assignment
will not possibly be a self-assignment. */
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
if (gimple_assign_single_p (def_stmt))
vdecl = gimple_assign_rhs1 (def_stmt);
}
they are the same. If so, print a warning message about self-assignment. */
static void
-compare_and_warn (gimple stmt, tree lhs, tree rhs)
+compare_and_warn (gimple *stmt, tree lhs, tree rhs)
{
if (operand_equal_p (lhs, rhs, OEP_PURE_SAME))
{
/* Check and warn if STMT is a self-assign statement. */
static void
-warn_self_assign (gimple stmt)
+warn_self_assign (gimple *stmt)
{
tree rhs, lhs;
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
/* We are only interested in an assignment with a single
rhs operand because if it is not, the original assignment
will not possibly be a self-assignment. */
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
if (gimple_assign_single_p (def_stmt))
vdecl = gimple_assign_rhs1 (def_stmt);
}
they are the same. If so, print a warning message about self-assignment. */
static void
-compare_and_warn (gimple stmt, tree lhs, tree rhs)
+compare_and_warn (gimple *stmt, tree lhs, tree rhs)
{
if (operand_equal_p (lhs, rhs, OEP_PURE_SAME))
{
/* Check and warn if STMT is a self-assign statement. */
static void
-warn_self_assign (gimple stmt)
+warn_self_assign (gimple *stmt)
{
tree rhs, lhs;
static bool
ignore_bb_p (const_basic_block bb)
{
- gimple g;
+ gimple *g;
if (bb->index < NUM_FIXED_BLOCKS)
return true;
count_insns (basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
int n = 0;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
/* Return true if CALL is const, or tm_pure. */
static bool
-is_tm_pure_call (gimple call)
+is_tm_pure_call (gimple *call)
{
tree fn = gimple_call_fn (call);
transaction. */
bool
-is_tm_ending (gimple stmt)
+is_tm_ending (gimple *stmt)
{
tree fndecl;
/* Return true if STMT is a TM load. */
static bool
-is_tm_load (gimple stmt)
+is_tm_load (gimple *stmt)
{
tree fndecl;
after-write, after-read, etc optimized variants. */
static bool
-is_tm_simple_load (gimple stmt)
+is_tm_simple_load (gimple *stmt)
{
tree fndecl;
/* Return true if STMT is a TM store. */
static bool
-is_tm_store (gimple stmt)
+is_tm_store (gimple *stmt)
{
tree fndecl;
after-write, after-read, etc optimized variants. */
static bool
-is_tm_simple_store (gimple stmt)
+is_tm_simple_store (gimple *stmt)
{
tree fndecl;
unsigned int block_flags : 8;
unsigned int func_flags : 8;
unsigned int saw_volatile : 1;
- gimple stmt;
+ gimple *stmt;
};
/* Return true if T is a volatile variable of some kind. */
diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
struct diagnose_tm *d = (struct diagnose_tm *) wi->info;
/* Save stmt for use in leaf analysis. */
/* Entry block for the transaction this address occurs in. */
basic_block entry_block;
/* Dominating statements the store occurs in. */
- vec<gimple> stmts;
+ vec<gimple *> stmts;
/* Initially, while we are building the log, we place a nonzero
value here to mean that this address *will* be saved with a
save/restore sequence. Later, when generating the save sequence
If known, ENTRY_BLOCK is the entry block for the region, otherwise
NULL. */
static void
-tm_log_add (basic_block entry_block, tree addr, gimple stmt)
+tm_log_add (basic_block entry_block, tree addr, gimple *stmt)
{
tm_log_entry **slot;
struct tm_log_entry l, *lp;
else
{
size_t i;
- gimple oldstmt;
+ gimple *oldstmt;
lp = *slot;
ADDR is the address to save.
STMT is the statement before which to place it. */
static void
-tm_log_emit_stmt (tree addr, gimple stmt)
+tm_log_emit_stmt (tree addr, gimple *stmt)
{
tree type = TREE_TYPE (addr);
tree size = TYPE_SIZE_UNIT (type);
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- gimple log;
+ gimple *log;
enum built_in_function code = BUILT_IN_TM_LOG;
if (type == float_type_node)
FOR_EACH_HASH_TABLE_ELEMENT (*tm_log, lp, tm_log_entry_t, hi)
{
size_t i;
- gimple stmt;
+ gimple *stmt;
if (dump_file)
{
{
size_t i;
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple stmt;
+ gimple *stmt;
struct tm_log_entry l, *lp;
for (i = 0; i < tm_log_save_addresses.length (); ++i)
int i;
struct tm_log_entry l, *lp;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
for (i = tm_log_save_addresses.length () - 1; i >= 0; i--)
{
static enum thread_memory_type
thread_private_new_memory (basic_block entry_block, tree x)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
enum tree_code code;
tm_new_mem_map_t **slot;
tm_new_mem_map_t elt, *elt_p;
private memory instrumentation. If no TPM instrumentation is
desired, STMT should be null. */
static bool
-requires_barrier (basic_block entry_block, tree x, gimple stmt)
+requires_barrier (basic_block entry_block, tree x, gimple *stmt)
{
tree orig = x;
while (handled_component_p (x))
static void
examine_assign_tm (unsigned *state, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (requires_barrier (/*entry_block=*/NULL, gimple_assign_rhs1 (stmt), NULL))
*state |= GTMA_HAVE_LOAD;
static void
examine_call_tm (unsigned *state, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree fn;
if (is_tm_pure_call (stmt))
static void
lower_transaction (gimple_stmt_iterator *gsi, struct walk_stmt_info *wi)
{
- gimple g;
+ gimple *g;
gtransaction *stmt = as_a <gtransaction *> (gsi_stmt (*gsi));
unsigned int *outer_state = (unsigned int *) wi->info;
unsigned int this_state = 0;
struct walk_stmt_info *wi)
{
unsigned int *state = (unsigned int *) wi->info;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
*handled_ops_p = true;
switch (gimple_code (stmt))
lower_sequence_no_tm (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info * wi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) == GIMPLE_TRANSACTION)
{
After TM_MARK, this gets replaced by a call to
BUILT_IN_TM_START.
Hence this will be either a gtransaction * or a gcall *. */
- gimple transaction_stmt;
+ gimple *transaction_stmt;
/* After TM_MARK expands the GIMPLE_TRANSACTION into a call to
BUILT_IN_TM_START, this field is true if the transaction is an
tm_region_init_1 (struct tm_region *region, basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple g;
+ gimple *g;
if (!region
|| (!region->irr_blocks && !region->exit_blocks))
static void
tm_region_init (struct tm_region *region)
{
- gimple g;
+ gimple *g;
edge_iterator ei;
edge e;
basic_block bb;
}
else
{
- gimple g;
+ gimple *g;
tree temp;
temp = create_tmp_reg (t);
}
else if (!useless_type_conversion_p (simple_type, type))
{
- gimple g;
+ gimple *g;
tree temp;
temp = create_tmp_reg (simple_type);
static void
expand_assign_tm (struct tm_region *region, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
bool store_p = requires_barrier (region->entry_block, lhs, NULL);
bool load_p = requires_barrier (region->entry_block, rhs, NULL);
- gimple gcall = NULL;
+ gimple *gcall = NULL;
if (!load_p && !store_p)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
tree t1 = create_tmp_reg (tm_state_type);
tree t2 = build_int_cst (tm_state_type, A_RESTORELIVEVARIABLES);
- gimple stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
+ gimple *stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
gimple_stmt_iterator gsi = gsi_last_bb (test_bb);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
tree t1 = create_tmp_reg (tm_state_type);
tree t2 = build_int_cst (tm_state_type, A_ABORTTRANSACTION);
- gimple stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
+ gimple *stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
gimple_stmt_iterator gsi = gsi_last_bb (test_bb);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
tree t1 = create_tmp_reg (tm_state_type);
tree t2 = build_int_cst (tm_state_type, A_RUNUNINSTRUMENTEDCODE);
- gimple stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
+ gimple *stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
gimple_stmt_iterator gsi = gsi_last_bb (test_bb);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
as necessary. Adjust *PNEXT as needed for the split block. */
static inline void
-split_bb_make_tm_edge (gimple stmt, basic_block dest_bb,
+split_bb_make_tm_edge (gimple *stmt, basic_block dest_bb,
gimple_stmt_iterator iter, gimple_stmt_iterator *pnext)
{
basic_block bb = gimple_bb (stmt);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi = next_gsi)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gcall *call_stmt;
next_gsi = gsi;
it accesses. */
static unsigned int
-tm_memopt_value_number (gimple stmt, enum insert_option op)
+tm_memopt_value_number (gimple *stmt, enum insert_option op)
{
struct tm_memop tmpmem, *mem;
tm_memop **slot;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bitmap bits;
unsigned int loc;
/* Inform about a load/store optimization. */
static void
-dump_tm_memopt_transform (gimple stmt)
+dump_tm_memopt_transform (gimple *stmt)
{
if (dump_file)
{
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bitmap read_avail = READ_AVAIL_IN (bb);
bitmap store_avail = STORE_AVAIL_IN (bb);
bitmap store_antic = STORE_ANTIC_OUT (bb);
ipa_uninstrument_transaction (struct tm_region *region,
vec<basic_block> queue)
{
- gimple transaction = region->transaction_stmt;
+ gimple *transaction = region->transaction_stmt;
basic_block transaction_bb = gimple_bb (transaction);
int n = queue.length ();
basic_block *new_bbs = XNEWVEC (basic_block, n);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !is_tm_pure_call (stmt))
{
tree fndecl = gimple_call_fndecl (stmt);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !is_tm_pure_call (stmt))
{
tree fndecl = gimple_call_fndecl (stmt);
for (i = 0; bbs.iterate (i, &bb); ++i)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
if (gimple_code (stmt) == GIMPLE_ASM)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_call (stmt))
continue;
#define PR_READONLY 0x4000
extern void compute_transaction_bits (void);
-extern bool is_tm_ending (gimple);
+extern bool is_tm_ending (gimple *);
extern tree build_tm_abort_call (location_t, bool);
extern bool is_tm_safe (const_tree);
extern bool is_tm_pure (const_tree);
unsigned i;
aff_tree to_add, current, curre;
tree e, rhs;
- gimple def;
+ gimple *def;
widest_int scale;
struct name_expansion *exp;
else if (bc == SSA_NAME)
{
tree base_val0, type;
- gimple base_def;
+ gimple *base_def;
int bit_sz;
/* Only handles cases where base value is converted
enum tree_code tcode,
const char *temp_name1,
const char *temp_name2,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
tree lbub_real_cst, lbub_cst, float_type;
static void
gen_conditions_for_domain (tree arg, inp_domain domain,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
if (domain.has_lb)
static void
gen_conditions_for_pow_cst_base (tree base, tree expn,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
inp_domain exp_domain;
static void
gen_conditions_for_pow_int_base (tree base, tree expn,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
- gimple base_def;
+ gimple *base_def;
tree base_val0;
tree int_type;
tree temp, tempn;
tree cst0;
- gimple stmt1, stmt2;
+ gimple *stmt1, *stmt2;
int bit_sz, max_exp;
inp_domain exp_domain;
and *NCONDS is the number of logical conditions. */
static void
-gen_conditions_for_pow (gcall *pow_call, vec<gimple> conds,
+gen_conditions_for_pow (gcall *pow_call, vec<gimple *> conds,
unsigned *nconds)
{
tree base, expn;
condition are separated by NULL tree in the vector. */
static void
-gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple> conds,
+gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple *> conds,
unsigned int *nconds)
{
gcall *call;
edge bi_call_in_edge0, guard_bb_in_edge;
unsigned tn_cond_stmts, nconds;
unsigned ci;
- gimple cond_expr = NULL;
- gimple cond_expr_start;
+ gimple *cond_expr = NULL;
+ gimple *cond_expr_start;
tree bi_call_label_decl;
- gimple bi_call_label;
+ gimple *bi_call_label;
- auto_vec<gimple, 12> conds;
+ auto_vec<gimple *, 12> conds;
gen_shrink_wrap_conditions (bi_call, conds, &nconds);
/* This can happen if the condition generator decides
cond_expr_start = conds[0];
for (ci = 0; ci < tn_cond_stmts; ci++)
{
- gimple c = conds[ci];
+ gimple *c = conds[ci];
gcc_assert (c || ci != 0);
if (!c)
break;
cond_expr_start = conds[ci0];
for (; ci < tn_cond_stmts; ci++)
{
- gimple c = conds[ci];
+ gimple *c = conds[ci];
gcc_assert (c || ci != ci0);
if (!c)
break;
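The two scan loops above decode the layout promised by the earlier comment: CONDS concatenates the statement sequences of several alternative guard conditions, each sequence terminated by a NULL entry. Schematically (statement names hypothetical):

  /* conds = [ c1_stmt0, c1_stmt1, NULL,   <- first guard, two stmts
               c2_stmt0, NULL ]            <- second guard, one stmt
     Each scan copies statements until it reaches NULL; the next guard
     resumes at the following index.  */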
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
/* Various helpers. */
-static inline bool stmt_starts_bb_p (gimple, gimple);
+static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
-static gimple first_non_label_stmt (basic_block);
+static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
-static bool call_can_make_abnormal_goto (gimple);
+static bool call_can_make_abnormal_goto (gimple *);
/* Flowgraph optimization and cleanup. */
static void gimple_merge_blocks (basic_block, basic_block);
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
return;
struct loop *loop;
basic_block bb;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_LOOP (loop, 0)
{
/* Return true if T is a computed goto. */
bool
-computed_goto_p (gimple t)
+computed_goto_p (gimple *t)
{
return (gimple_code (t) == GIMPLE_GOTO
&& TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
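For reference, the construct this predicate detects is a GNU C computed goto, which gimplifies to a GIMPLE_GOTO whose destination is a variable rather than a LABEL_DECL. A minimal example (not from the patch):

  void
  dispatch (int i)
  {
    static void *table[] = { &&op_a, &&op_b };
    goto *table[i];		/* computed_goto_p () is true here */
   op_a:
    return;
   op_b:
    return;
  }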
assert_unreachable_fallthru_edge_p (edge e)
{
basic_block pred_bb = e->src;
- gimple last = last_stmt (pred_bb);
+ gimple *last = last_stmt (pred_bb);
if (last && gimple_code (last) == GIMPLE_COND)
{
basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
if (EDGE_COUNT (other_bb->succs) == 0)
{
gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
- gimple stmt;
+ gimple *stmt;
if (gsi_end_p (gsi))
return false;
CFG build time and only ever clear it later. */
static void
-gimple_call_initialize_ctrl_altering (gimple stmt)
+gimple_call_initialize_ctrl_altering (gimple *stmt)
{
int flags = gimple_call_flags (stmt);
make_blocks_1 (gimple_seq seq, basic_block bb)
{
gimple_stmt_iterator i = gsi_start (seq);
- gimple stmt = NULL;
+ gimple *stmt = NULL;
bool start_new_block = true;
bool first_stmt_of_seq = true;
while (!gsi_end_p (i))
{
- gimple prev_stmt;
+ gimple *prev_stmt;
prev_stmt = stmt;
stmt = gsi_stmt (i);
{
tree lhs = gimple_get_lhs (stmt);
tree tmp = create_tmp_var (TREE_TYPE (lhs));
- gimple s = gimple_build_assign (lhs, tmp);
+ gimple *s = gimple_build_assign (lhs, tmp);
gimple_set_location (s, gimple_location (stmt));
gimple_set_block (s, gimple_block (stmt));
gimple_set_lhs (stmt, tmp);
{
gimple_stmt_iterator gsi
= gsi_start_nondebug_after_labels_bb (e->dest);
- gimple g = gsi_stmt (gsi);
+ gimple *g = gsi_stmt (gsi);
if (g
&& is_gimple_call (g)
&& gimple_call_internal_p (g)
factored computed goto. */
tree factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION);
- gimple factored_computed_goto_label
+ gimple *factored_computed_goto_label
= gimple_build_label (factored_label_decl);
gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
/* Build our new computed goto. */
- gimple factored_computed_goto = gimple_build_goto (var);
+ gimple *factored_computed_goto = gimple_build_goto (var);
gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
FOR_EACH_VEC_ELT (*bbs, idx, bb)
continue;
gsi = gsi_last_bb (bb);
- gimple last = gsi_stmt (gsi);
+ gimple *last = gsi_stmt (gsi);
gcc_assert (computed_goto_p (last));
/* Copy the original computed goto's destination into VAR. */
- gimple assignment
+ gimple *assignment
= gimple_build_assign (var, gimple_goto_dest (last));
gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
else
{
tree arg = inner ? boolean_true_node : boolean_false_node;
- gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
+ gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
1, arg);
gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
bool fallthru = false;
int ret = 0;
if (!gsi_end_p (gsi))
{
/* Make an edge to every setjmp-like call. */
- gimple call_stmt = gsi_stmt (gsi);
+ gimple *call_stmt = gsi_stmt (gsi);
if (is_gimple_call (call_stmt)
&& ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
|| gimple_call_builtin_p (call_stmt,
bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
basic_block bb = gimple_bb (stmt);
basic_block lastbb, afterbb;
int old_num_bbs = n_basic_blocks_for_fn (cfun);
{
edge e;
edge_iterator ei;
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
if (locus == UNKNOWN_LOCATION)
FOR_EACH_EDGE (e, ei, bb->succs)
{
- gimple first = first_non_label_stmt (e->dest);
- gimple last = last_stmt (e->dest);
+ gimple *first = first_non_label_stmt (e->dest);
+ gimple *last = last_stmt (e->dest);
if ((first && same_line_p (locus, gimple_location (first)))
|| (last && same_line_p (locus, gimple_location (last))))
{
make_cond_expr_edges (basic_block bb)
{
gcond *entry = as_a <gcond *> (last_stmt (bb));
- gimple then_stmt, else_stmt;
+ gimple *then_stmt, *else_stmt;
basic_block then_bb, else_bb;
tree then_label, else_label;
edge e;
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (bb)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
group_case_labels_stmt (as_a <gswitch *> (stmt));
}
{
gimple_stmt_iterator gsi =
gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
- gimple stmt;
+ gimple *stmt;
stmt = gimple_build_label (dest);
gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
make_goto_expr_edges (basic_block bb)
{
gimple_stmt_iterator last = gsi_last_bb (bb);
- gimple goto_t = gsi_stmt (last);
+ gimple *goto_t = gsi_stmt (last);
/* A simple GOTO creates normal edges. */
if (simple_goto_p (goto_t))
First do so for each block ending in a control statement. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
tree label, new_label;
if (!stmt)
FOR_EACH_BB_FN (bb, cfun)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
group_case_labels_stmt (as_a <gswitch *> (stmt));
}
static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
- gimple stmt;
+ gimple *stmt;
if (!single_succ_p (a))
return false;
{
imm_use_iterator imm_iter;
use_operand_p use;
- gimple stmt;
+ gimple *stmt;
edge e;
FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
if (gimple_code (stmt) != GIMPLE_PHI)
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- gimple orig_stmt = stmt;
+ gimple *orig_stmt = stmt;
size_t i;
/* FIXME. It shouldn't be required to keep TREE_CONSTANT
gsi = gsi_last_bb (a);
for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
{
- gimple phi = gsi_stmt (psi);
+ gimple *phi = gsi_stmt (psi);
tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
- gimple copy;
+ gimple *copy;
bool may_replace_uses = (virtual_operand_p (def)
|| may_propagate_copy (def, use));
{
imm_use_iterator iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_IMM_USE_STMT (stmt, iter, def)
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
/* Remove labels from B and set gimple_bb to A for other statements. */
for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
tree label = gimple_label_label (label_stmt);
/* Other user labels keep around in a form of a debug stmt. */
else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
{
- gimple dbg = gimple_build_debug_bind (label,
- integer_zero_node,
- stmt);
+ gimple *dbg = gimple_build_debug_bind (label,
+ integer_zero_node,
+ stmt);
gimple_debug_bind_reset_value (dbg);
gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
}
details. */
for (i = gsi_last_bb (bb); !gsi_end_p (i);)
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
glabel *label_stmt = dyn_cast <glabel *> (stmt);
if (label_stmt
&& (FORCED_LABEL (gimple_label_label (label_stmt))
edge
find_taken_edge (basic_block bb, tree val)
{
- gimple stmt;
+ gimple *stmt;
stmt = last_stmt (bb);
flow. Transfers of control flow associated with EH are excluded. */
static bool
-call_can_make_abnormal_goto (gimple t)
+call_can_make_abnormal_goto (gimple *t)
{
/* If the function has no non-local labels, then a call cannot make an
abnormal transfer of control. */
Transfers of control flow associated with EH are excluded. */
bool
-stmt_can_make_abnormal_goto (gimple t)
+stmt_can_make_abnormal_goto (gimple *t)
{
if (computed_goto_p (t))
return true;
/* Return true if T represents a stmt that always transfers control. */
bool
-is_ctrl_stmt (gimple t)
+is_ctrl_stmt (gimple *t)
{
switch (gimple_code (t))
{
(e.g., a call to a non-returning function). */
bool
-is_ctrl_altering_stmt (gimple t)
+is_ctrl_altering_stmt (gimple *t)
{
gcc_assert (t);
/* Return true if T is a simple local goto. */
bool
-simple_goto_p (gimple t)
+simple_goto_p (gimple *t)
{
return (gimple_code (t) == GIMPLE_GOTO
&& TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
label. */
static inline bool
-stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
+stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
if (stmt == NULL)
return false;
/* Return true if T should end a basic block. */
bool
-stmt_ends_bb_p (gimple t)
+stmt_ends_bb_p (gimple *t)
{
return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
/* Return the first statement in basic block BB. */
-gimple
+gimple *
first_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_start_bb (bb);
- gimple stmt = NULL;
+ gimple *stmt = NULL;
while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
{
/* Return the first non-label statement in basic block BB. */
-static gimple
+static gimple *
first_non_label_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_start_bb (bb);
/* Return the last statement in basic block BB. */
-gimple
+gimple *
last_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_last_bb (bb);
- gimple stmt = NULL;
+ gimple *stmt = NULL;
while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
{
if the block is totally empty, or if it contains more than one
statement. */
-gimple
+gimple *
last_and_only_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
- gimple last, prev;
+ gimple *last, *prev;
if (gsi_end_p (i))
return NULL;
Returns true if anything is wrong. */
static bool
-verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
+verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
{
/* There isn't much that could be wrong in a gimple debug stmt. A
gimple debug bind stmt, for example, maps a tree, that's usually
error, otherwise false. */
static bool
-verify_gimple_stmt (gimple stmt)
+verify_gimple_stmt (gimple *stmt)
{
switch (gimple_code (stmt))
{
and false otherwise. */
static bool
-verify_gimple_phi (gimple phi)
+verify_gimple_phi (gimple *phi)
{
bool err = false;
unsigned i;
for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
{
- gimple stmt = gsi_stmt (ittr);
+ gimple *stmt = gsi_stmt (ittr);
switch (gimple_code (stmt))
{
static bool eh_error_found;
bool
-verify_eh_throw_stmt_node (const gimple &stmt, const int &,
- hash_set<gimple> *visited)
+verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
+ hash_set<gimple *> *visited)
{
if (!visited->contains (stmt))
{
timevar_push (TV_TREE_STMT_VERIFY);
hash_set<void *> visited;
- hash_set<gimple> visited_stmts;
+ hash_set<gimple *> visited_stmts;
/* Collect all BLOCKs referenced by the BLOCK tree of FN. */
hash_set<tree> blocks;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool err2 = false;
struct walk_stmt_info wi;
tree addr;
}
eh_error_found = false;
- hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
+ hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
if (eh_table)
- eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
+ eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
(&visited_stmts);
if (err || eh_error_found)
int err = 0;
basic_block bb;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
edge e;
edge_iterator ei;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
tree label;
- gimple prev_stmt = stmt;
+ gimple *prev_stmt = stmt;
stmt = gsi_stmt (gsi);
/* Verify that body of basic block BB is free of control flow. */
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (found_ctrl_stmt)
{
{
basic_block src = e->src;
gimple_stmt_iterator i;
- gimple stmt;
+ gimple *stmt;
/* We can replace or remove a complex jump only when we have exactly
two edges. */
basic_block bb = e->src;
gimple_stmt_iterator gsi;
edge ret;
- gimple stmt;
+ gimple *stmt;
if (e->flags & EDGE_ABNORMAL)
return NULL;
e->src = new_bb;
/* Get a stmt iterator pointing to the first stmt to move. */
- if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
+ if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
gsi = gsi_after_labels (bb);
else
{
- gsi = gsi_for_stmt ((gimple) stmt);
+ gsi = gsi_for_stmt ((gimple *) stmt);
gsi_next (&gsi);
}
static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
- gimple last, split_point;
+ gimple *last, *split_point;
gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
if (gsi_end_p (gsi))
return NULL;
def_operand_p def_p;
ssa_op_iter op_iter;
tree lhs;
- gimple stmt, copy;
+ gimple *stmt, *copy;
stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_LABEL)
gcov_type total_count = 0, exit_count = 0;
edge exits[2], nexits[2], e;
gimple_stmt_iterator gsi;
- gimple cond_stmt;
+ gimple *cond_stmt;
edge sorig, snew;
basic_block exit_bb;
gphi_iterator psi;
struct walk_stmt_info *wi)
{
struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
tree block = gimple_block (stmt);
if (block == p->orig_block
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
struct walk_stmt_info wi;
memset (&wi, 0, sizeof (wi));
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
eh_region stmt_region;
int lp_nr;
static bool
gimple_block_ends_with_condjump_p (const_basic_block bb)
{
- gimple stmt = last_stmt (CONST_CAST_BB (bb));
+ gimple *stmt = last_stmt (CONST_CAST_BB (bb));
return (stmt && gimple_code (stmt) == GIMPLE_COND);
}
Helper function for gimple_flow_call_edges_add. */
static bool
-need_fake_edge_p (gimple t)
+need_fake_edge_p (gimple *t)
{
tree fndecl = NULL_TREE;
int call_flags = 0;
{
basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
- gimple t = NULL;
+ gimple *t = NULL;
if (!gsi_end_p (gsi))
t = gsi_stmt (gsi);
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
gimple_stmt_iterator gsi;
- gimple stmt, last_stmt;
+ gimple *stmt, *last_stmt;
if (!bb)
continue;
bool changed = false;
edge e;
edge_iterator ei;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && stmt_can_throw_internal (stmt))
return false;
bool changed = false;
edge e;
edge_iterator ei;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (!cfun->has_nonlocal_label
&& !cfun->calls_setjmp)
basic_block cond_bb, void *cond_e)
{
gimple_stmt_iterator gsi;
- gimple new_cond_expr;
+ gimple *new_cond_expr;
tree cond_expr = (tree) cond_e;
edge e0;
and creation of a new conditionally executed basic block.
Return created basic block. */
basic_block
-insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
+insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
{
edge fall = split_block (bb, stmt);
gimple_stmt_iterator iter = gsi_last_bb (bb);
pass_warn_function_return::execute (function *fun)
{
source_location location;
- gimple last;
+ gimple *last;
edge e;
edge_iterator ei;
{
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
{
- gimple last = last_stmt (e->src);
+ gimple *last = last_stmt (e->src);
greturn *return_stmt = dyn_cast <greturn *> (last);
if (return_stmt
&& gimple_return_retval (return_stmt) == NULL
for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
{
- gimple g = gsi_stmt (i);
+ gimple *g = gsi_stmt (i);
switch (gimple_code (g))
{
bb->count = apply_scale (bb->count, count_scale);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl = is_gimple_call (stmt)
? gimple_call_fndecl (stmt)
: NULL;
when inlining a noreturn call that does in fact return. */
if (EDGE_COUNT (bb->succs) == 0)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (!stmt
|| (!is_ctrl_stmt (stmt)
&& (!is_gimple_call (stmt)
/* Garbage collection support for edge_def. */
extern void gt_ggc_mx (tree&);
-extern void gt_ggc_mx (gimple&);
+extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);
/* PCH support for edge_def. */
extern void gt_pch_nx (tree&);
-extern void gt_pch_nx (gimple&);
+extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);
extern void gimple_dump_cfg (FILE *, int);
extern void dump_cfg_stats (FILE *);
extern void debug_cfg_stats (void);
-extern bool computed_goto_p (gimple);
-extern bool stmt_can_make_abnormal_goto (gimple);
+extern bool computed_goto_p (gimple *);
+extern bool stmt_can_make_abnormal_goto (gimple *);
extern basic_block get_abnormal_succ_dispatcher (basic_block);
-extern bool is_ctrl_stmt (gimple);
-extern bool is_ctrl_altering_stmt (gimple);
-extern bool simple_goto_p (gimple);
-extern bool stmt_ends_bb_p (gimple);
+extern bool is_ctrl_stmt (gimple *);
+extern bool is_ctrl_altering_stmt (gimple *);
+extern bool simple_goto_p (gimple *);
+extern bool stmt_ends_bb_p (gimple *);
extern bool assert_unreachable_fallthru_edge_p (edge);
extern void delete_tree_cfg_annotations (void);
extern gphi *get_virtual_phi (basic_block);
-extern gimple first_stmt (basic_block);
-extern gimple last_stmt (basic_block);
-extern gimple last_and_only_stmt (basic_block);
+extern gimple *first_stmt (basic_block);
+extern gimple *last_stmt (basic_block);
+extern gimple *last_and_only_stmt (basic_block);
extern void verify_gimple_in_seq (gimple_seq);
extern void verify_gimple_in_cfg (struct function *, bool);
extern tree gimple_block_label (basic_block);
extern void extract_true_false_edges_from_block (basic_block, edge *, edge *);
extern unsigned int execute_fixup_cfg (void);
extern unsigned int split_critical_edges (void);
-extern basic_block insert_cond_bb (basic_block, gimple, gimple);
+extern basic_block insert_cond_bb (basic_block, gimple *, gimple *);
extern bool gimple_find_sub_bbs (gimple_seq, gimple_stmt_iterator *);
#endif /* _TREE_CFG_H */
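The prototype churn in this header shows the patch in miniature: a typedef that hid a pointer gives way to a name that can simply be forward-declared, so pointer-ness is spelled out at every use. A self-contained sketch of the two styles (toy names, not GCC's):

  /* Before: the typedef hides the pointer.  */
  struct stmt_base { int code; };
  typedef stmt_base *stmt;
  extern bool ends_bb_p (stmt);		/* takes a pointer, invisibly */

  /* After: a forward declaration suffices, and pointers are explicit.  */
  struct stmt2;
  extern bool ends_bb_p2 (stmt2 *);	/* pointer-ness spelled out */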
{
edge taken_edge;
bool retval = false;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree val;
if (!single_succ_p (bb))
to updated gimple_call_flags. */
static void
-cleanup_call_ctrl_altering_flag (gimple bb_end)
+cleanup_call_ctrl_altering_flag (gimple *bb_end)
{
if (!is_gimple_call (bb_end)
|| !gimple_call_ctrl_altering_p (bb_end))
{
gimple_stmt_iterator gsi;
bool retval = false;
- gimple stmt;
+ gimple *stmt;
/* If the last statement of the block could throw and now cannot,
we need to prune cfg. */
anything else means this is not a forwarder block. */
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
{
edge succ = single_succ_edge (bb), e, s;
basic_block dest = succ->dest;
- gimple label;
+ gimple *label;
edge_iterator ei;
gimple_stmt_iterator gsi, gsi_to;
bool can_move_debug_stmts;
gsi_to = gsi_after_labels (dest);
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
{
- gimple debug = gsi_stmt (gsi);
+ gimple *debug = gsi_stmt (gsi);
if (!is_gimple_debug (debug))
break;
gsi_remove (&gsi, false);
Return true if cleanup-cfg needs to run. */
bool
-fixup_noreturn_call (gimple stmt)
+fixup_noreturn_call (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
bool changed = false;
{
edge succ = single_succ_edge (bb);
basic_block dest = succ->dest;
- gimple label;
+ gimple *label;
basic_block dombb, domdest, dom;
/* We check for infinite loops already in tree_forwarder_block_p.
gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
use_operand_p imm_use;
- gimple use_stmt;
+ gimple *use_stmt;
/* If the PHI's result is never used, then we can just
ignore it. */
/* In tree-cfgcleanup.c */
extern bitmap cfgcleanup_altered_bbs;
extern bool cleanup_tree_cfg (void);
-extern bool fixup_noreturn_call (gimple stmt);
+extern bool fixup_noreturn_call (gimple *stmt);
#endif /* GCC_TREE_CFGCLEANUP_H */
/* Bounds used for the check. */
tree bounds;
/* Check statement. Can be NULL for removed checks. */
- gimple stmt;
+ gimple *stmt;
};
/* Structure to hold checks information for BB. */
static void
chkp_collect_value (tree ptr, address_t &res)
{
- gimple def_stmt;
+ gimple *def_stmt;
enum gimple_code code;
enum tree_code rhs_code;
address_t addr;
/* Fill check_info structure *CI with information about
check STMT. */
static void
-chkp_fill_check_info (gimple stmt, struct check_info *ci)
+chkp_fill_check_info (gimple *stmt, struct check_info *ci)
{
ci->addr.pol.create (0);
ci->bounds = gimple_call_arg (stmt, 1);
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
if (gimple_code (stmt) != GIMPLE_CALL)
continue;
static int
chkp_get_check_result (struct check_info *ci, tree bounds)
{
- gimple bnd_def;
+ gimple *bnd_def;
address_t bound_val;
int sign, res = 0;
static void
chkp_use_outer_bounds_if_possible (struct check_info *ci)
{
- gimple bnd_def;
+ gimple *bnd_def;
tree bnd1, bnd2, bnd_res = NULL;
int check_res1, check_res2;
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree fndecl;
if (gimple_code (stmt) != GIMPLE_CALL
checks only when size is not zero. */
if (!known)
{
- gimple check = gimple_build_cond (NE_EXPR,
- size,
- size_zero_node,
- NULL_TREE,
- NULL_TREE);
+ gimple *check = gimple_build_cond (NE_EXPR,
+ size,
+ size_zero_node,
+ NULL_TREE,
+ NULL_TREE);
/* Split block before string function call. */
gsi_prev (&i);
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
- gimple dom_use, use_stmt, stmt = gsi_stmt (i);
+ gimple *dom_use, *use_stmt, *stmt = gsi_stmt (i);
basic_block dom_bb;
ssa_op_iter iter;
imm_use_iterator use_iter;
corresponding to CODE. */
bool
-chkp_gimple_call_builtin_p (gimple call,
+chkp_gimple_call_builtin_p (gimple *call,
enum built_in_function code)
{
tree fndecl;
chkp_insert_retbnd_call (tree bndval, tree retval,
gimple_stmt_iterator *gsi)
{
- gimple call;
+ gimple *call;
if (!bndval)
bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
/* Mark statement S to not be instrumented. */
static void
-chkp_mark_stmt (gimple s)
+chkp_mark_stmt (gimple *s)
{
gimple_set_plf (s, GF_PLF_1, true);
}
/* Mark statement S to be instrumented. */
static void
-chkp_unmark_stmt (gimple s)
+chkp_unmark_stmt (gimple *s)
{
gimple_set_plf (s, GF_PLF_1, false);
}
/* Return 1 if statement S should not be instrumented. */
static bool
-chkp_marked_stmt_p (gimple s)
+chkp_marked_stmt_p (gimple *s)
{
return gimple_plf (s, GF_PLF_1);
}
/* Get SSA_NAME to be used as temp. */
static tree
-chkp_get_tmp_reg (gimple stmt)
+chkp_get_tmp_reg (gimple *stmt)
{
if (in_chkp_pass)
return make_ssa_name (chkp_get_tmp_var (), stmt);
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
bool *res)
{
- gimple phi;
+ gimple *phi;
unsigned i;
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
- gimple phi;
+ gimple *phi;
unsigned i;
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
{
if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
{
- gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
+ gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
bnd = gimple_assign_rhs1 (bnd_def);
}
else
{
tree copy;
- gimple def = SSA_NAME_DEF_STMT (ptr);
- gimple assign;
+ gimple *def = SSA_NAME_DEF_STMT (ptr);
+ gimple *assign;
gimple_stmt_iterator gsi;
if (bnd_var)
}
else
{
- gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
+ gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
/* Sometimes (e.g. when we load a pointer from
memory) bounds are produced later than the pointer.
We need to insert the bounds copy appropriately. */
tree dirflag)
{
gimple_seq seq;
- gimple check;
+ gimple *check;
tree node;
if (!chkp_function_instrumented_p (current_function_decl)
if (dump_file && (dump_flags & TDF_DETAILS))
{
- gimple before = gsi_stmt (iter);
+ gimple *before = gsi_stmt (iter);
fprintf (dump_file, "Generated lower bound check for statement ");
print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
fprintf (dump_file, " ");
tree dirflag)
{
gimple_seq seq;
- gimple check;
+ gimple *check;
tree node;
if (!chkp_function_instrumented_p (current_function_decl)
if (dump_file && (dump_flags & TDF_DETAILS))
{
- gimple before = gsi_stmt (iter);
+ gimple *before = gsi_stmt (iter);
fprintf (dump_file, "Generated upper bound check for statement ");
print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
fprintf (dump_file, " ");
tree dirflag)
{
gimple_stmt_iterator call_iter = *gsi;
- gimple call = gsi_stmt (*gsi);
+ gimple *call = gsi_stmt (*gsi);
tree fndecl = gimple_call_fndecl (call);
tree addr = gimple_call_arg (call, 0);
tree bounds = chkp_find_bounds (addr, gsi);
void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
- gimple call = gsi_stmt (*gsi);
+ gimple *call = gsi_stmt (*gsi);
tree fndecl = gimple_call_fndecl (call);
tree addr = gimple_call_arg (call, 0);
tree bounds = chkp_find_bounds (addr, gsi);
- gimple extract;
+ gimple *extract;
if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
fndecl = chkp_extract_lower_fndecl;
if (!all_bounds[offs / POINTER_SIZE])
{
tree temp = make_temp_ssa_name (type, NULL, "");
- gimple assign = gimple_build_assign (temp, elem);
+ gimple *assign = gimple_build_assign (temp, elem);
gimple_stmt_iterator gsi;
gsi_insert_before (iter, assign, GSI_SAME_STMT);
{
gimple_seq seq;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
tree bounds;
if (iter)
|| flag_chkp_use_static_const_bounds > 0)
{
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
zero_bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
|| flag_chkp_use_static_const_bounds > 0)
{
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
none_bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
{
gimple_stmt_iterator gsi;
tree bounds;
- gimple stmt;
+ gimple *stmt;
tree fndecl = gimple_call_fndecl (call);
unsigned int retflags;
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
gimple_seq seq;
- gimple stmt;
+ gimple *stmt;
tree bounds;
seq = NULL;
gimple_stmt_iterator *gsi)
{
gimple_seq seq;
- gimple stmt;
+ gimple *stmt;
seq = NULL;
/* Compute bounds for pointer NODE which was assigned in
assignment statement ASSIGN. Return computed bounds. */
static tree
-chkp_compute_bounds_for_assignment (tree node, gimple assign)
+chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
enum tree_code rhs_code = gimple_assign_rhs_code (assign);
tree rhs1 = gimple_assign_rhs1 (assign);
tree val2 = gimple_assign_rhs3 (assign);
tree bnd1 = chkp_find_bounds (val1, &iter);
tree bnd2 = chkp_find_bounds (val2, &iter);
- gimple stmt;
+ gimple *stmt;
if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
bounds = incomplete_bounds;
bounds = bnd1;
else
{
- gimple stmt;
+ gimple *stmt;
tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
boolean_type_node, rhs1, rhs2);
bounds = chkp_get_tmp_reg (assign);
&& TREE_CODE (base) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
{
- gimple stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
+ gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
bounds = gimple_assign_lhs (stmt);
}
Return computed bounds. */
static tree
-chkp_get_bounds_by_definition (tree node, gimple def_stmt,
+chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
gphi_iterator *iter)
{
tree var, bounds;
tree bounds, size_reloc, lb, size, max_size, cond;
gimple_stmt_iterator gsi;
gimple_seq seq = NULL;
- gimple stmt;
+ gimple *stmt;
/* If instrumentation is not enabled for vars having
incomplete type then just return zero bounds to avoid
{
tree bnd_var = chkp_make_static_bounds (decl);
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (bounds, bnd_var);
{
tree bnd_var = chkp_make_static_bounds (cst);
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (bounds, bnd_var);
else
{
gimple_seq seq;
- gimple stmt;
+ gimple *stmt;
tree bounds;
seq = NULL;
bounds = chkp_get_registered_bounds (ptr_src);
if (!bounds)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
gphi_iterator phi_iter;
bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
&& chkp_type_has_pointer (node_type)
&& flag_chkp_store_bounds)
{
- gimple stmt = gsi_stmt (*iter);
+ gimple *stmt = gsi_stmt (*iter);
tree rhs1 = gimple_assign_rhs1 (stmt);
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
/* Add code to copy bounds for all pointers copied
in ASSIGN created during inline of EDGE. */
void
-chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
+chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
tree lhs = gimple_assign_lhs (assign);
tree rhs = gimple_assign_rhs1 (assign);
/* We should create edges for all created calls to bndldx and bndstx. */
while (gsi_stmt (iter) != assign)
{
- gimple stmt = gsi_stmt (iter);
+ gimple *stmt = gsi_stmt (iter);
if (gimple_code (stmt) == GIMPLE_CALL)
{
tree fndecl = gimple_call_fndecl (stmt);
FOR_ALL_BB_FN (bb, cfun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
gimple_stmt_iterator next = i;
gsi_next (&next);
while (!gsi_end_p (next))
{
- gimple next_stmt = gsi_stmt (next);
+ gimple *next_stmt = gsi_stmt (next);
gsi_remove (&next, false);
gsi_insert_on_edge (fall, next_stmt);
}
static void
chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* For calls we want to walk call args only. */
if (gimple_code (stmt) == GIMPLE_CALL)
{
next = bb->next_bb;
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
- gimple s = gsi_stmt (i);
+ gimple *s = gsi_stmt (i);
/* Skip statement marked to not be instrumented. */
if (chkp_marked_stmt_p (s))
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
enum built_in_function fcode;
extern gcall *chkp_retbnd_call_by_val (tree val);
extern bool chkp_function_instrumented_p (tree fndecl);
extern void chkp_function_mark_instrumented (tree fndecl);
-extern void chkp_copy_bounds_for_assign (gimple assign,
+extern void chkp_copy_bounds_for_assign (gimple *assign,
struct cgraph_edge *edge);
-extern bool chkp_gimple_call_builtin_p (gimple call,
+extern bool chkp_gimple_call_builtin_p (gimple *call,
enum built_in_function code);
extern rtx chkp_expand_zero_bounds (void);
extern void chkp_expand_bounds_reset_for_mem (tree mem, tree ptr);
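One way these chkp declarations are meant to be used, as a hedged sketch (is_get_lbound_call is hypothetical; the builtin code is the one handled by chkp_replace_extract_builtin above):

static bool
is_get_lbound_call (gimple *stmt)
{
  /* Guard with is_gimple_call so the sketch is safe on any stmt.  */
  return is_gimple_call (stmt)
         && chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_GET_PTR_LBOUND);
}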
bool
convert_affine_scev (struct loop *loop, tree type,
- tree *base, tree *step, gimple at_stmt,
+ tree *base, tree *step, gimple *at_stmt,
bool use_overflow_semantics)
{
tree ct = TREE_TYPE (*step);
The increment for a pointer type is always sizetype. */
tree
-chrec_convert_rhs (tree type, tree chrec, gimple at_stmt)
+chrec_convert_rhs (tree type, tree chrec, gimple *at_stmt)
{
if (POINTER_TYPE_P (type))
type = sizetype;
tests, but also to enforce that the result follows them. */
static tree
-chrec_convert_1 (tree type, tree chrec, gimple at_stmt,
+chrec_convert_1 (tree type, tree chrec, gimple *at_stmt,
bool use_overflow_semantics)
{
tree ct, res;
tests, but also to enforce that the result follows them. */
tree
-chrec_convert (tree type, tree chrec, gimple at_stmt,
+chrec_convert (tree type, tree chrec, gimple *at_stmt,
bool use_overflow_semantics)
{
return chrec_convert_1 (type, chrec, at_stmt, use_overflow_semantics);
extern tree chrec_fold_plus (tree, tree, tree);
extern tree chrec_fold_minus (tree, tree, tree);
extern tree chrec_fold_multiply (tree, tree, tree);
-extern tree chrec_convert (tree, tree, gimple, bool = true);
-extern tree chrec_convert_rhs (tree, tree, gimple);
+extern tree chrec_convert (tree, tree, gimple *, bool = true);
+extern tree chrec_convert_rhs (tree, tree, gimple *);
extern tree chrec_convert_aggressive (tree, tree, bool *);
/* Operations. */
extern tree reset_evolution_in_loop (unsigned, tree, tree);
extern tree chrec_merge (tree, tree);
extern void for_each_scev_op (tree *, bool (*) (tree *, void *), void *);
-extern bool convert_affine_scev (struct loop *, tree, tree *, tree *, gimple,
+extern bool convert_affine_scev (struct loop *, tree, tree *, tree *, gimple *,
bool);
/* Observers. */
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt;
+ gimple *stmt;
tree op0, op1;
bool sim_again_p;
/* Evaluate statement STMT against the complex lattice defined above. */
static enum ssa_prop_result
-complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
+complex_visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
tree *result_p)
{
complex_lattice_t new_l, old_l, op1_l, op2_l;
complex_lattice_t lattice = find_lattice_value (ssa_name);
size_t ssa_name_index;
tree comp;
- gimple last;
+ gimple *last;
gimple_seq list;
/* We know the value must be zero, else there's a bug in our lattice
/* Update the complex components of the ssa name on the lhs of STMT. */
static void
-update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
+update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r,
tree i)
{
tree lhs;
static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
- gimple stmt;
+ gimple *stmt;
gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
stmt = gsi_stmt (*gsi);
if (is_complex_reg (gimple_phi_result (phi)))
{
tree lr, li;
- gimple pr = NULL, pi = NULL;
+ gimple *pr = NULL, *pi = NULL;
unsigned int i, n;
lr = get_component_ssa_name (gimple_phi_result (phi), false);
{
tree inner_type = TREE_TYPE (type);
tree r, i, lhs, rhs;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_assign (stmt))
{
else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
{
tree x;
- gimple t;
+ gimple *t;
location_t loc;
loc = gimple_location (stmt);
machine_mode mode;
enum built_in_function bcode;
tree fn, type, lhs;
- gimple old_stmt;
+ gimple *old_stmt;
gcall *stmt;
old_stmt = gsi_stmt (*gsi);
{
tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
basic_block bb_cond, bb_true, bb_false, bb_join;
- gimple stmt;
+ gimple *stmt;
/* Examine |br| < |bi|, and branch. */
t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
if (TREE_CODE (compare) != INTEGER_CST)
{
edge e;
- gimple stmt;
+ gimple *stmt;
tree cond, tmp;
tmp = create_tmp_var (boolean_type_node);
tree br, tree bi, enum tree_code code)
{
tree cr, ci, cc, type;
- gimple stmt;
+ gimple *stmt;
cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree type, inner_type, lhs;
tree ac, ar, ai, bc, br, bi;
complex_lattice_t al, bl;
needs to point to the original SSA name. Since statements and
SSA names are of different data types, we need this union. See
the explanation in struct imm_use_iterator. */
- union { gimple stmt; tree ssa_name; } GTY((skip(""))) loc;
+ union { gimple *stmt; tree ssa_name; } GTY((skip(""))) loc;
tree *GTY((skip(""))) use;
};
tree var;
/* Statement that defines this SSA name. */
- gimple def_stmt;
+ gimple *def_stmt;
/* Value range information. */
union ssa_name_info_type {
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
return false;
- gimple def_stmt = SSA_NAME_DEF_STMT (op0);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op0);
enum tree_code subcode;
if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
bool
dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
struct loop *loop = loop_containing_stmt (stmt);
tree ref = DR_REF (dr);
HOST_WIDE_INT pbitsize, pbitpos;
which the data reference should be analyzed. */
struct data_reference *
-create_data_ref (loop_p nest, loop_p loop, tree memref, gimple stmt,
+create_data_ref (loop_p nest, loop_p loop, tree memref, gimple *stmt,
bool is_read)
{
struct data_reference *dr;
true if STMT clobbers memory, false otherwise. */
static bool
-get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_heap> *references)
+get_references_in_stmt (gimple *stmt, vec<data_ref_loc, va_heap> *references)
{
bool clobbers_memory = false;
data_ref_loc ref;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
get_references_in_stmt (stmt, &references);
if (references.length ())
{
loop of the loop nest in which the references should be analyzed. */
bool
-find_data_references_in_stmt (struct loop *nest, gimple stmt,
+find_data_references_in_stmt (struct loop *nest, gimple *stmt,
vec<data_reference_p> *datarefs)
{
unsigned i;
should be analyzed. */
bool
-graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple stmt,
+graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple *stmt,
vec<data_reference_p> *datarefs)
{
unsigned i;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (!find_data_references_in_stmt (loop, stmt, datarefs))
{
struct data_reference
{
/* A pointer to the statement that contains this DR. */
- gimple stmt;
+ gimple *stmt;
/* A pointer to the memory reference. */
tree ref;
extern void free_dependence_relations (vec<ddr_p> );
extern void free_data_ref (data_reference_p);
extern void free_data_refs (vec<data_reference_p> );
-extern bool find_data_references_in_stmt (struct loop *, gimple,
+extern bool find_data_references_in_stmt (struct loop *, gimple *,
vec<data_reference_p> *);
-extern bool graphite_find_data_references_in_stmt (loop_p, loop_p, gimple,
+extern bool graphite_find_data_references_in_stmt (loop_p, loop_p, gimple *,
vec<data_reference_p> *);
tree find_data_references_in_loop (struct loop *, vec<data_reference_p> *);
bool loop_nest_has_data_refs (loop_p loop);
-struct data_reference *create_data_ref (loop_p, loop_p, tree, gimple, bool);
+struct data_reference *create_data_ref (loop_p, loop_p, tree, gimple *, bool);
extern bool find_loop_nest (struct loop *, vec<loop_p> *);
extern struct data_dependence_relation *initialize_data_dependence_relation
(struct data_reference *, struct data_reference *, vec<loop_p>);
gimple_stmt_iterator bsi;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
gimple_stmt_iterator bsi;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
bool
-stmt_references_abnormal_ssa_name (gimple stmt)
+stmt_references_abnormal_ssa_name (gimple *stmt)
{
ssa_op_iter oi;
use_operand_p use_p;
extern tree get_addr_base_and_unit_offset_1 (tree, HOST_WIDE_INT *,
tree (*) (tree));
extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
-extern bool stmt_references_abnormal_ssa_name (gimple);
+extern bool stmt_references_abnormal_ssa_name (gimple *);
extern void dump_enumerated_decls (FILE *, int);
/* In some instances a tree and a gimple need to be stored in the same table,
e.g. in hash tables. This is the structure used to do this. */
-typedef union {tree *tp; tree t; gimple g;} treemple;
+typedef union {tree *tp; tree t; gimple *g;} treemple;
/* Misc functions used in this file. */
/* Add statement T in function IFUN to landing pad NUM. */
static void
-add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
+add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
{
gcc_assert (num != 0);
if (!get_eh_throw_stmt_table (ifun))
- set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));
+ set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));
gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}
/* Add statement T in the current function (cfun) to EH landing pad NUM. */
void
-add_stmt_to_eh_lp (gimple t, int num)
+add_stmt_to_eh_lp (gimple *t, int num)
{
add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION. */
static void
-record_stmt_eh_region (eh_region region, gimple t)
+record_stmt_eh_region (eh_region region, gimple *t)
{
if (region == NULL)
return;
/* Remove statement T in function IFUN from its EH landing pad. */
bool
-remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
+remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
{
if (!get_eh_throw_stmt_table (ifun))
return false;
EH landing pad. */
bool
-remove_stmt_from_eh_lp (gimple t)
+remove_stmt_from_eh_lp (gimple *t)
{
return remove_stmt_from_eh_lp_fn (cfun, t);
}
statement is not recorded in the region table. */
int
-lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
+lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
{
if (ifun->eh->throw_stmt_table == NULL)
return 0;
/* Likewise, but always use the current function. */
int
-lookup_stmt_eh_lp (gimple t)
+lookup_stmt_eh_lp (gimple *t)
{
/* We can get called from initialized data when -fnon-call-exceptions
is on; prevent crash. */
}
static void
-collect_finally_tree (gimple stmt, gtry *region);
+collect_finally_tree (gimple *stmt, gtry *region);
/* Go through the gimple sequence. Works with collect_finally_tree to
record all GIMPLE_LABEL and GIMPLE_TRY statements. */
}
static void
-collect_finally_tree (gimple stmt, gtry *region)
+collect_finally_tree (gimple *stmt, gtry *region)
{
treemple temp;
would leave the try_finally node that START lives in. */
static bool
-outside_finally_tree (treemple start, gimple target)
+outside_finally_tree (treemple start, gimple *target)
{
struct finally_tree_node n, *p;
treemple stmt;
location_t location;
gimple_seq repl_stmt;
- gimple cont_stmt;
+ gimple *cont_stmt;
int index;
/* This is used when index >= 0 to indicate that stmt is a label (as
opposed to a goto stmt). */
size_t goto_queue_active;
/* Pointer map to help in searching goto_queue when it is large. */
- hash_map<gimple, goto_queue_node *> *goto_queue_map;
+ hash_map<gimple *, goto_queue_node *> *goto_queue_map;
/* The set of unique labels seen as entries in the goto queue. */
vec<tree> dest_array;
if (!tf->goto_queue_map)
{
- tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
+ tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
for (i = 0; i < tf->goto_queue_active; i++)
{
bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
static void
-replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
+replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
gimple_stmt_iterator *gsi)
{
gimple_seq seq;
try_finally node. */
static void
-maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
+maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
struct leh_tf_state *tf = state->tf;
treemple new_stmt;
static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
- gimple x;
+ gimple *x;
/* In the case of a return, the queue node must be a gimple statement. */
gcc_assert (!q->is_label);
static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
- gimple x;
+ gimple *x;
gimple_seq cleanup, result;
location_t loc = gimple_location (tp);
for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
it on the EH paths. When it is not eliminated, make it transparent in
the debug info. */
static inline geh_else *
get_eh_else (gimple_seq finally)
{
- gimple x = gimple_seq_first_stmt (finally);
+ gimple *x = gimple_seq_first_stmt (finally);
if (gimple_code (x) == GIMPLE_EH_ELSE)
{
gcc_assert (gimple_seq_singleton_p (finally));
gimple_stmt_iterator gsi;
bool finally_may_fallthru;
gimple_seq finally;
- gimple x;
+ gimple *x;
geh_mnt *eh_mnt;
gtry *try_stmt;
geh_else *eh_else;
struct leh_tf_state *tf)
{
tree lab;
- gimple x;
+ gimple *x;
geh_else *eh_else;
gimple_seq finally;
struct goto_queue_node *q, *qe;
struct goto_queue_node *q, *qe;
geh_else *eh_else;
glabel *label_stmt;
- gimple x;
+ gimple *x;
gimple_seq finally;
gimple_stmt_iterator gsi;
tree finally_label;
for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
{
tree block = gimple_block (stmt);
gimple_seq finally;
gimple_seq new_stmt;
gimple_seq seq;
- gimple x;
+ gimple *x;
geh_else *eh_else;
tree tmp;
location_t tf_loc = gimple_location (tf->try_finally_expr);
tree last_case;
vec<tree> case_label_vec;
gimple_seq switch_body = NULL;
- gimple x;
+ gimple *x;
geh_else *eh_else;
tree tmp;
- gimple switch_stmt;
+ gimple *switch_stmt;
gimple_seq finally;
- hash_map<tree, gimple> *cont_map = NULL;
+ hash_map<tree, gimple *> *cont_map = NULL;
/* The location of the TRY_FINALLY stmt. */
location_t tf_loc = gimple_location (tf->try_finally_expr);
/* The location of the finally block. */
/* We store the cont_stmt in the pointer map, so that we can recover
it in the loop below. */
if (!cont_map)
- cont_map = new hash_map<tree, gimple>;
+ cont_map = new hash_map<tree, gimple *>;
cont_map->put (case_lab, q->cont_stmt);
case_label_vec.quick_push (case_lab);
}
}
for (j = last_case_index; j < last_case_index + nlabels; j++)
{
- gimple cont_stmt;
+ gimple *cont_stmt;
last_case = case_label_vec[j];
{
/* Duplicate __builtin_stack_restore in the hope of eliminating it
on the EH paths and, consequently, useless cleanups. */
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt)
&& !gimple_clobber_p (stmt)
&& !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
if (this_tf.fallthru_label)
{
/* This must be reached only if ndests == 0. */
- gimple x = gimple_build_label (this_tf.fallthru_label);
+ gimple *x = gimple_build_label (this_tf.fallthru_label);
gimple_seq_add_stmt (&this_tf.top_p_seq, x);
}
gimple_stmt_iterator gsi;
tree out_label;
gimple_seq new_seq, cleanup;
- gimple x;
+ gimple *x;
location_t try_catch_loc = gimple_location (tp);
if (flag_exceptions)
{
struct leh_state this_state = *state;
eh_region this_region = NULL;
- gimple inner, x;
+ gimple *inner, *x;
gimple_seq new_seq;
inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
if (flag_exceptions)
{
- gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
+ gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
eh_region this_region;
this_region = gen_eh_region_must_not_throw (state->cur_region);
result = gimple_try_eval (tp);
if (fake_tf.fallthru_label)
{
- gimple x = gimple_build_label (fake_tf.fallthru_label);
+ gimple *x = gimple_build_label (fake_tf.fallthru_label);
gimple_seq_add_stmt (&result, x);
}
}
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
gimple_seq replace;
- gimple x;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *x;
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
{
tree lhs = gimple_get_lhs (stmt);
tree tmp = create_tmp_var (TREE_TYPE (lhs));
- gimple s = gimple_build_assign (lhs, tmp);
+ gimple *s = gimple_build_assign (lhs, tmp);
gimple_set_location (s, gimple_location (stmt));
gimple_set_block (s, gimple_block (stmt));
gimple_set_lhs (stmt, tmp);
if there is such a landing pad within the current function. */
void
-make_eh_edges (gimple stmt)
+make_eh_edges (gimple *stmt)
{
basic_block src, dst;
eh_landing_pad lp;
{
eh_landing_pad old_lp, new_lp;
basic_block old_bb;
- gimple throw_stmt;
+ gimple *throw_stmt;
int old_lp_nr, new_lp_nr;
tree old_label, new_label;
edge_iterator ei;
an assignment or a conditional) may throw. */
static bool
-stmt_could_throw_1_p (gimple stmt)
+stmt_could_throw_1_p (gimple *stmt)
{
enum tree_code code = gimple_expr_code (stmt);
bool honor_nans = false;
/* Return true if statement STMT could throw an exception. */
bool
-stmt_could_throw_p (gimple stmt)
+stmt_could_throw_p (gimple *stmt)
{
if (!flag_exceptions)
return false;
the current function (CFUN). */
bool
-stmt_can_throw_external (gimple stmt)
+stmt_can_throw_external (gimple *stmt)
{
int lp_nr;
the current function (CFUN). */
bool
-stmt_can_throw_internal (gimple stmt)
+stmt_can_throw_internal (gimple *stmt)
{
int lp_nr;
any change was made. */
bool
-maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
+maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
{
if (stmt_could_throw_p (stmt))
return false;
/* Likewise, but always use the current function. */
bool
-maybe_clean_eh_stmt (gimple stmt)
+maybe_clean_eh_stmt (gimple *stmt)
{
return maybe_clean_eh_stmt_fn (cfun, stmt);
}
done that may require an EH edge purge. */
bool
-maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
+maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
{
int lp_nr = lookup_stmt_eh_lp (old_stmt);
operand is the return value of duplicate_eh_regions. */
bool
-maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
- struct function *old_fun, gimple old_stmt,
+maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
+ struct function *old_fun, gimple *old_stmt,
hash_map<void *, void *> *map,
int default_lp_nr)
{
and thus no remapping is required. */
bool
-maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
+maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
{
int lp_nr;
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
gimple_stmt_iterator gsi;
- gimple ones, twos;
+ gimple *ones, *twos;
unsigned int ai;
gsi = gsi_start (oneh);
static void
optimize_double_finally (gtry *one, gtry *two)
{
- gimple oneh;
+ gimple *oneh;
gimple_stmt_iterator gsi;
gimple_seq cleanup;
refactor_eh_r (gimple_seq seq)
{
gimple_stmt_iterator gsi;
- gimple one, two;
+ gimple *one, *two;
one = NULL;
two = NULL;
int lp_nr;
eh_region src_r, dst_r;
gimple_stmt_iterator gsi;
- gimple x;
+ gimple *x;
tree fn, src_nr;
bool ret = false;
FOR_EACH_BB_FN (bb, fun)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
if (last && is_gimple_resx (last))
{
dominance_invalidated |=
call, and has an incoming EH edge. */
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_clobber_p (stmt))
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!gimple_clobber_p (stmt))
continue;
unlink_stmt_vdef (stmt);
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_code (stmt) == GIMPLE_LABEL)
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
if (is_gimple_debug (stmt))
continue;
/* But adjust virtual operands if we sunk across a PHI node. */
if (vuse)
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
use_operand_p use_p;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
int region_nr;
eh_region r;
tree filter, fn;
- gimple x;
+ gimple *x;
bool redirected = false;
region_nr = gimple_eh_dispatch_region (stmt);
FOR_EACH_BB_FN (bb, fun)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
if (last == NULL)
continue;
if (gimple_code (last) == GIMPLE_EH_DISPATCH)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (mark_landing_pads)
{
{
for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
{
- gimple use_stmt;
+ gimple *use_stmt;
gphi *phi = gpi.phi ();
tree lhs = gimple_phi_result (phi);
tree rhs = gimple_phi_arg_def (phi, 0);
{
basic_block bb = label_to_block (lp->post_landing_pad);
gimple_stmt_iterator gsi;
- gimple resx;
+ gimple *resx;
eh_region new_region;
edge_iterator ei;
edge e, e_out;
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_EH)
{
- gimple stmt = last_stmt (e->src);
+ gimple *stmt = last_stmt (e->src);
remove_stmt_from_eh_lp (stmt);
remove_edge (e);
}
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_EH)
{
- gimple stmt = last_stmt (e->src);
+ gimple *stmt = last_stmt (e->src);
remove_stmt_from_eh_lp (stmt);
add_stmt_to_eh_lp (stmt, new_lp_nr);
remove_edge (e);
edge that make_eh_edges would create. */
DEBUG_FUNCTION bool
-verify_eh_edges (gimple stmt)
+verify_eh_edges (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
eh_landing_pad lp = NULL;
typedef struct eh_region_d *eh_region;
extern void using_eh_for_cleanups (void);
-extern void add_stmt_to_eh_lp (gimple, int);
-extern bool remove_stmt_from_eh_lp_fn (struct function *, gimple);
-extern bool remove_stmt_from_eh_lp (gimple);
-extern int lookup_stmt_eh_lp_fn (struct function *, gimple);
-extern int lookup_stmt_eh_lp (gimple);
+extern void add_stmt_to_eh_lp (gimple *, int);
+extern bool remove_stmt_from_eh_lp_fn (struct function *, gimple *);
+extern bool remove_stmt_from_eh_lp (gimple *);
+extern int lookup_stmt_eh_lp_fn (struct function *, gimple *);
+extern int lookup_stmt_eh_lp (gimple *);
extern bool make_eh_dispatch_edges (geh_dispatch *);
-extern void make_eh_edges (gimple);
+extern void make_eh_edges (gimple *);
extern edge redirect_eh_edge (edge, basic_block);
extern void redirect_eh_dispatch_edge (geh_dispatch *, edge, basic_block);
extern bool operation_could_trap_helper_p (enum tree_code, bool, bool, bool,
bool, tree, bool *);
extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
extern bool tree_could_trap_p (tree);
-extern bool stmt_could_throw_p (gimple);
+extern bool stmt_could_throw_p (gimple *);
extern bool tree_could_throw_p (tree);
-extern bool stmt_can_throw_external (gimple);
-extern bool stmt_can_throw_internal (gimple);
-extern bool maybe_clean_eh_stmt_fn (struct function *, gimple);
-extern bool maybe_clean_eh_stmt (gimple);
-extern bool maybe_clean_or_replace_eh_stmt (gimple, gimple);
-extern bool maybe_duplicate_eh_stmt_fn (struct function *, gimple,
- struct function *, gimple,
+extern bool stmt_can_throw_external (gimple *);
+extern bool stmt_can_throw_internal (gimple *);
+extern bool maybe_clean_eh_stmt_fn (struct function *, gimple *);
+extern bool maybe_clean_eh_stmt (gimple *);
+extern bool maybe_clean_or_replace_eh_stmt (gimple *, gimple *);
+extern bool maybe_duplicate_eh_stmt_fn (struct function *, gimple *,
+ struct function *, gimple *,
hash_map<void *, void *> *, int);
-extern bool maybe_duplicate_eh_stmt (gimple, gimple);
+extern bool maybe_duplicate_eh_stmt (gimple *, gimple *);
extern void maybe_remove_unreachable_handlers (void);
-extern bool verify_eh_edges (gimple);
+extern bool verify_eh_edges (gimple *);
extern bool verify_eh_dispatch_edge (geh_dispatch *);
#endif /* GCC_TREE_EH_H */
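A short sketch against the declarations above (move_eh_lp is hypothetical; positive region numbers denote landing pads, per the lookup functions):

static void
move_eh_lp (gimple *old_stmt, gimple *new_stmt)
{
  int lp_nr = lookup_stmt_eh_lp (old_stmt);
  /* Transfer the landing-pad association only when the
     replacement statement can still throw.  */
  if (lp_nr > 0 && stmt_could_throw_p (new_stmt))
    {
      remove_stmt_from_eh_lp (old_stmt);
      add_stmt_to_eh_lp (new_stmt, lp_nr);
    }
}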
new assignment statement, and substitute yet another SSA_NAME. */
if (wi->changed)
{
- gimple x;
+ gimple *x;
addr = create_tmp_var (TREE_TYPE (t));
x = gimple_build_assign (addr, t);
/* Lower all of the operands of STMT. */
static void
-lower_emutls_stmt (gimple stmt, struct lower_emutls_data *d)
+lower_emutls_stmt (gimple *stmt, struct lower_emutls_data *d)
{
struct walk_stmt_info wi;
ifc_temp_var (tree type, tree expr, gimple_stmt_iterator *gsi)
{
tree new_name = make_temp_ssa_name (type, NULL, "_ifc_");
- gimple stmt = gimple_build_assign (new_name, expr);
+ gimple *stmt = gimple_build_assign (new_name, expr);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
return new_name;
}
static enum tree_code
parse_predicate (tree cond, tree *op0, tree *op1)
{
- gimple s;
+ gimple *s;
if (TREE_CODE (cond) == SSA_NAME
&& is_gimple_assign (s = SSA_NAME_DEF_STMT (cond)))
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_phi_result (phi))
{
if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI
- && USE_STMT (use_p) != (gimple) phi)
+ && USE_STMT (use_p) != phi)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Difficult to handle this virtual phi.\n");
(read or written) on every iteration of the if-converted loop. */
static bool
-memrefs_read_or_written_unconditionally (gimple stmt,
+memrefs_read_or_written_unconditionally (gimple *stmt,
vec<data_reference_p> drs)
{
int i, j;
every iteration of the if-converted loop. */
static bool
-write_memrefs_written_at_least_once (gimple stmt,
+write_memrefs_written_at_least_once (gimple *stmt,
vec<data_reference_p> drs)
{
int i, j;
iteration unconditionally. */
static bool
-ifcvt_memrefs_wont_trap (gimple stmt, vec<data_reference_p> refs)
+ifcvt_memrefs_wont_trap (gimple *stmt, vec<data_reference_p> refs)
{
return write_memrefs_written_at_least_once (stmt, refs)
&& memrefs_read_or_written_unconditionally (stmt, refs);
not trap in the innermost loop containing STMT. */
static bool
-ifcvt_could_trap_p (gimple stmt, vec<data_reference_p> refs)
+ifcvt_could_trap_p (gimple *stmt, vec<data_reference_p> refs)
{
if (gimple_vuse (stmt)
&& !gimple_could_trap_p_1 (stmt, false, false)
(conditional load or store based on a mask computed from bb predicate). */
static bool
-ifcvt_can_use_mask_load_store (gimple stmt)
+ifcvt_can_use_mask_load_store (gimple *stmt)
{
tree lhs, ref;
machine_mode mode;
- the LHS is not a var decl. */
static bool
-if_convertible_gimple_assign_stmt_p (gimple stmt,
+if_convertible_gimple_assign_stmt_p (gimple *stmt,
vec<data_reference_p> refs,
bool *any_mask_load_store)
{
- it is a builtin call. */
static bool
-if_convertible_stmt_p (gimple stmt, vec<data_reference_p> refs,
+if_convertible_stmt_p (gimple *stmt, vec<data_reference_p> refs,
bool *any_mask_load_store)
{
switch (gimple_code (stmt))
{
basic_block bb = ifc_bbs[i];
tree cond;
- gimple stmt;
+ gimple *stmt;
/* The loop latch and loop exit block are always executed and
have no extra conditions to be processed: skip them. */
EXTENDED is true if PHI has > 2 arguments. */
static bool
-is_cond_scalar_reduction (gimple phi, gimple *reduc, tree arg_0, tree arg_1,
+is_cond_scalar_reduction (gimple *phi, gimple **reduc, tree arg_0, tree arg_1,
tree *op0, tree *op1, bool extended)
{
tree lhs, r_op1, r_op2;
- gimple stmt;
- gimple header_phi = NULL;
+ gimple *stmt;
+ gimple *header_phi = NULL;
enum tree_code reduction_op;
basic_block bb = gimple_bb (phi);
struct loop *loop = bb->loop_father;
/* Check that R_OP1 is used in reduction stmt or in PHI only. */
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, r_op1)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (use_stmt == stmt)
Returns rhs of resulting PHI assignment. */
static tree
-convert_scalar_cond_reduction (gimple reduc, gimple_stmt_iterator *gsi,
+convert_scalar_cond_reduction (gimple *reduc, gimple_stmt_iterator *gsi,
tree cond, tree op0, tree op1, bool swap)
{
gimple_stmt_iterator stmt_it;
- gimple new_assign;
+ gimple *new_assign;
tree rhs;
tree rhs1 = gimple_assign_rhs1 (reduc);
tree tmp = make_temp_ssa_name (TREE_TYPE (rhs1), NULL, "_ifc_");
static void
predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
{
- gimple new_stmt = NULL, reduc;
+ gimple *new_stmt = NULL, *reduc;
tree rhs, res, arg0, arg1, op0, op1, scev;
tree cond;
unsigned int index0;
basic_block bb = ifc_bbs[i];
tree cond = bb_predicate (bb);
bool swap;
- gimple stmt;
+ gimple *stmt;
int index;
if (is_true_predicate (cond))
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
tree ref, addr, ptr, masktype, mask_op0, mask_op1, mask;
- gimple new_stmt;
+ gimple *new_stmt;
int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (lhs)));
ref = TREE_CODE (lhs) == SSA_NAME ? rhs : lhs;
mark_addressable (ref);
could have derived it from. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_bb (stmt, merge_target_bb);
if (predicated[i])
{
basic_block cond_bb;
tree cond = make_ssa_name (boolean_type_node);
struct loop *new_loop;
- gimple g;
+ gimple *g;
gimple_stmt_iterator gsi;
g = gimple_build_call_internal (IFN_LOOP_VECTORIZED, 2,
basic_block bb;
unsigned int num = loop->num_nodes;
unsigned int i;
- gimple stmt;
+ gimple *stmt;
edge e;
edge_iterator ei;
use statement with newly created lhs. */
static void
-ifcvt_split_def_stmt (gimple def_stmt, gimple use_stmt)
+ifcvt_split_def_stmt (gimple *def_stmt, gimple *use_stmt)
{
tree var;
tree lhs;
- gimple copy_stmt;
+ gimple *copy_stmt;
gimple_stmt_iterator gsi;
use_operand_p use_p;
imm_use_iterator imm_iter;
not have a single use. */
static void
-ifcvt_walk_pattern_tree (tree var, vec<gimple> *defuse_list,
- gimple use_stmt)
+ifcvt_walk_pattern_tree (tree var, vec<gimple *> *defuse_list,
+ gimple *use_stmt)
{
tree rhs1, rhs2;
enum tree_code code;
- gimple def_stmt;
+ gimple *def_stmt;
def_stmt = SSA_NAME_DEF_STMT (var);
if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
by the vectorizer. */
static bool
-stmt_is_root_of_bool_pattern (gimple stmt)
+stmt_is_root_of_bool_pattern (gimple *stmt)
{
enum tree_code code;
tree lhs, rhs;
ifcvt_repair_bool_pattern (basic_block bb)
{
tree rhs;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
- vec<gimple> defuse_list = vNULL;
- vec<gimple> pattern_roots = vNULL;
+ vec<gimple *> defuse_list = vNULL;
+ vec<gimple *> pattern_roots = vNULL;
bool repeat = true;
int niter = 0;
unsigned int ix;
while (defuse_list.length () > 0)
{
repeat = true;
- gimple def_stmt, use_stmt;
+ gimple *def_stmt, *use_stmt;
use_stmt = defuse_list.pop ();
def_stmt = defuse_list.pop ();
ifcvt_split_def_stmt (def_stmt, use_stmt);
static void
ifcvt_local_dce (basic_block bb)
{
- gimple stmt;
- gimple stmt1;
- gimple phi;
+ gimple *stmt;
+ gimple *stmt1;
+ gimple *phi;
gimple_stmt_iterator gsi;
- vec<gimple> worklist;
+ vec<gimple *> worklist;
enum gimple_code code;
use_operand_p use_p;
imm_use_iterator imm_iter;
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
-static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
+static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
-static void insert_init_stmt (copy_body_data *, basic_block, gimple);
+static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID. Although the name suggests
that the trees should be variables, it is used for more than that. */
&& single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
{
tree vexpr = make_node (DEBUG_EXPR_DECL);
- gimple def_temp;
+ gimple *def_temp;
gimple_stmt_iterator gsi;
tree val = SSA_NAME_VAR (name);
|| EDGE_COUNT (id->entry_bb->preds) != 1))
{
gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
- gimple init_stmt;
+ gimple *init_stmt;
tree zero = build_zero_cst (TREE_TYPE (new_tree));
init_stmt = gimple_build_assign (new_tree, zero);
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
block using the mapping information in ID. */
-static gimple
+static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
- gimple new_bind;
+ gimple *new_bind;
tree new_block, new_vars;
gimple_seq body, new_body;
information in ID. Return the new statement copy. */
static gimple_seq
-remap_gimple_stmt (gimple stmt, copy_body_data *id)
+remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
- gimple copy = NULL;
+ gimple *copy = NULL;
struct walk_stmt_info wi;
bool skip_first = false;
gimple_seq stmts = NULL;
if (retbnd && bndslot)
{
- gimple bndcopy = gimple_build_assign (bndslot, retbnd);
+ gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
memset (&wi, 0, sizeof (wi));
wi.info = id;
walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
if (TREE_CODE (lhs) == MEM_REF
&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
+ gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
if (gimple_bb (def_stmt)
&& !bitmap_bit_p (id->blocks_to_copy,
gimple_bb (def_stmt)->index))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple_seq stmts;
- gimple stmt = gsi_stmt (gsi);
- gimple orig_stmt = stmt;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *orig_stmt = stmt;
gimple_stmt_iterator stmts_gsi;
bool stmt_added = false;
the number of anonymous arguments. */
size_t nargs = gimple_call_num_args (id->call_stmt), i;
tree count, p;
- gimple new_stmt;
+ gimple *new_stmt;
for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
nargs--;
for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
{
- gimple copy_stmt;
+ gimple *copy_stmt;
bool can_throw, nonlocal_goto;
copy_stmt = gsi_stmt (si);
gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
while (is_gimple_debug (gsi_stmt (ssi)))
{
- gimple stmt = gsi_stmt (ssi);
+ gimple *stmt = gsi_stmt (ssi);
gdebug *new_stmt;
tree var;
tree value;
redirect_all_calls (copy_body_data * id, basic_block bb)
{
gimple_stmt_iterator si;
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (is_gimple_call (stmt))
{
struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
lexical block and line number information from base_stmt, if given,
or from the last stmt of the block otherwise. */
-static gimple
+static gimple *
insert_init_debug_bind (copy_body_data *id,
basic_block bb, tree var, tree value,
- gimple base_stmt)
+ gimple *base_stmt)
{
- gimple note;
+ gimple *note;
gimple_stmt_iterator gsi;
tree tracked_var;
}
static void
-insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
+insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
/* If VAR represents a zero-sized variable, it's possible that the
assignment statement may result in no gimple statements. */
/* Initialize parameter P with VALUE. If needed, produce the init statement
at the end of BB. When BB is NULL, we return the init statement to be
output later. */
-static gimple
+static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
basic_block bb, tree *vars)
{
- gimple init_stmt = NULL;
+ gimple *init_stmt = NULL;
tree var;
tree rhs = value;
tree def = (gimple_in_ssa_p (cfun)
top of the stack in ID from the GIMPLE_CALL STMT. */
static void
-initialize_inlined_parameters (copy_body_data *id, gimple stmt,
+initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
tree fn, basic_block bb)
{
tree parms;
{
tree fn = (tree) wip->info;
tree t;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
FOR_EACH_BB_FN (bb, fun)
{
- gimple ret;
+ gimple *ret;
gimple_seq seq = bb_seq (bb);
ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
forbidden_p = (ret != NULL);
WEIGHTS contains weights attributed to various constructs. */
int
-estimate_num_insns (gimple stmt, eni_weights *weights)
+estimate_num_insns (gimple *stmt, eni_weights *weights)
{
unsigned cost, i;
enum gimple_code code = gimple_code (stmt);
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
static bool
-expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
+expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
{
tree use_retvar;
tree fn;
'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
if (use_retvar && gimple_call_lhs (stmt))
{
- gimple old_stmt = stmt;
+ gimple *old_stmt = stmt;
stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
gsi_replace (&stmt_gsi, stmt, false);
maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
/* Put returned bounds into the correct place if required. */
if (return_bounds)
{
- gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
- gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
+ gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
+ gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
unlink_stmt_vdef (old_stmt);
gsi_replace (&bnd_gsi, new_stmt, false);
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gsi_prev (&gsi);
if (is_gimple_call (stmt)
in the STATEMENTS pointer set. */
static void
-fold_marked_statements (int first, hash_set<gimple> *statements)
+fold_marked_statements (int first, hash_set<gimple *> *statements)
{
for (; first < n_basic_blocks_for_fn (cfun); first++)
if (BASIC_BLOCK_FOR_FN (cfun, first))
gsi_next (&gsi))
if (statements->contains (gsi_stmt (gsi)))
{
- gimple old_stmt = gsi_stmt (gsi);
+ gimple *old_stmt = gsi_stmt (gsi);
tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
if (old_decl && DECL_BUILT_IN (old_decl))
gsi_prev (&i2);
if (fold_stmt (&gsi))
{
- gimple new_stmt;
+ gimple *new_stmt;
/* If a builtin at the end of a bb folded into nothing,
the following loop won't work. */
if (gsi_end_p (gsi))
{
/* Re-read the statement from GSI as fold_stmt() may
have changed it. */
- gimple new_stmt = gsi_stmt (gsi);
+ gimple *new_stmt = gsi_stmt (gsi);
update_stmt (new_stmt);
if (is_gimple_call (old_stmt)
id.transform_return_to_modify = true;
id.transform_parameter = true;
id.transform_lang_insert_block = NULL;
- id.statements_to_fold = new hash_set<gimple>;
+ id.statements_to_fold = new hash_set<gimple *>;
push_gimplify_context ();
struct walk_stmt_info *wi)
{
copy_body_data *id = (copy_body_data *) wi->info;
- gimple gs = gsi_stmt (*gsip);
+ gimple *gs = gsi_stmt (*gsip);
if (gbind *stmt = dyn_cast <gbind *> (gs))
{
unsigned i;
struct ipa_replace_map *replace_info;
basic_block old_entry_block, bb;
- auto_vec<gimple, 10> init_stmts;
+ auto_vec<gimple *, 10> init_stmts;
tree vars = NULL_TREE;
gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
memset (&id, 0, sizeof (id));
/* Generate a new name for the new version. */
- id.statements_to_fold = new hash_set<gimple>;
+ id.statements_to_fold = new hash_set<gimple *>;
id.decl_map = new hash_map<tree, tree>;
id.debug_map = NULL;
if (tree_map)
for (i = 0; i < tree_map->length (); i++)
{
- gimple init;
+ gimple *init;
replace_info = (*tree_map)[i];
if (replace_info->replace_p)
{
tree retbnd;
/* Assign statements that need a bounds copy. */
- vec<gimple> assign_stmts;
+ vec<gimple *> assign_stmts;
/* The map from local declarations in the inlined function to
equivalents in the function into which it is being inlined. */
/* GIMPLE_CALL if va arg parameter packs should be expanded or NULL
if not. */
- gimple call_stmt;
+ gimple *call_stmt;
/* Exception landing pad the inlined call lies in. */
int eh_lp_nr;
void (*transform_lang_insert_block) (tree);
/* Statements that might be possibly folded. */
- hash_set<gimple> *statements_to_fold;
+ hash_set<gimple *> *statements_to_fold;
/* Entry basic block to currently copied body. */
basic_block entry_bb;
tree copy_tree_r (tree *, int *, void *);
tree copy_decl_no_change (tree decl, copy_body_data *id);
int estimate_move_cost (tree type, bool);
-int estimate_num_insns (gimple, eni_weights *);
+int estimate_num_insns (gimple *, eni_weights *);
int estimate_num_insns_fn (tree, eni_weights *);
int count_insns_seq (gimple_seq, eni_weights *);
bool tree_versionable_function_p (tree);
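A sketch of how estimate_num_insns composes over a block (estimate_bb_size is hypothetical; WEIGHTS would typically be one of the global eni_weights instances such as eni_size_weights):

static int
estimate_bb_size (basic_block bb, eni_weights *weights)
{
  int size = 0;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    size += estimate_num_insns (gsi_stmt (gsi), weights);
  return size;
}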
decided in mark_def_sites. */
static inline bool
-rewrite_uses_p (gimple stmt)
+rewrite_uses_p (gimple *stmt)
{
return gimple_visited_p (stmt);
}
/* Set the rewrite marker on STMT to the value given by REWRITE_P. */
static inline void
-set_rewrite_uses (gimple stmt, bool rewrite_p)
+set_rewrite_uses (gimple *stmt, bool rewrite_p)
{
gimple_set_visited (stmt, rewrite_p);
}
registered, but they don't need to have their uses renamed. */
static inline bool
-register_defs_p (gimple stmt)
+register_defs_p (gimple *stmt)
{
return gimple_plf (stmt, GF_PLF_1) != 0;
}
/* If REGISTER_DEFS_P is true, mark STMT to have its DEFs registered. */
static inline void
-set_register_defs (gimple stmt, bool register_defs_p)
+set_register_defs (gimple *stmt, bool register_defs_p)
{
gimple_set_plf (stmt, GF_PLF_1, register_defs_p);
}
static void
initialize_flags_in_bb (basic_block bb)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
set_rewrite_uses (phi, false);
set_register_defs (phi, false);
}
we create. */
static void
-mark_def_sites (basic_block bb, gimple stmt, bitmap kills)
+mark_def_sites (basic_block bb, gimple *stmt, bitmap kills)
{
tree def;
use_operand_p use_p;
tracked_var = target_for_debug_bind (var);
if (tracked_var)
{
- gimple note = gimple_build_debug_bind (tracked_var,
- PHI_RESULT (phi),
+ gimple *note = gimple_build_debug_bind (tracked_var,
+ PHI_RESULT (phi),
phi);
gimple_stmt_iterator si = gsi_after_labels (bb);
gsi_insert_before (&si, note, GSI_SAME_STMT);
/* Helper function for rewrite_stmt. Rewrite uses in a debug stmt. */
static void
-rewrite_debug_stmt_uses (gimple stmt)
+rewrite_debug_stmt_uses (gimple *stmt)
{
use_operand_p use_p;
ssa_op_iter iter;
!gsi_end_p (gsi) && lim > 0;
gsi_next (&gsi), lim--)
{
- gimple gstmt = gsi_stmt (gsi);
+ gimple *gstmt = gsi_stmt (gsi);
if (!gimple_debug_source_bind_p (gstmt))
break;
if (gimple_debug_source_bind_get_value (gstmt) == var)
/* If not, add a new source bind stmt. */
if (def == NULL_TREE)
{
- gimple def_temp;
+ gimple *def_temp;
def = make_node (DEBUG_EXPR_DECL);
def_temp = gimple_build_debug_source_bind (def, var, NULL);
DECL_ARTIFICIAL (def) = 1;
use_operand_p use_p;
def_operand_p def_p;
ssa_op_iter iter;
- gimple stmt = gsi_stmt (*si);
+ gimple *stmt = gsi_stmt (*si);
/* If mark_def_sites decided that we don't need to rewrite this
statement, ignore it. */
tracked_var = target_for_debug_bind (var);
if (tracked_var)
{
- gimple note = gimple_build_debug_bind (tracked_var, name, stmt);
+ gimple *note = gimple_build_debug_bind (tracked_var, name, stmt);
gsi_insert_after (si, note, GSI_SAME_STMT);
}
}
DEF_P. Returns whether the statement should be removed. */
static inline bool
-maybe_register_def (def_operand_p def_p, gimple stmt,
+maybe_register_def (def_operand_p def_p, gimple *stmt,
gimple_stmt_iterator gsi)
{
tree def = DEF_FROM_PTR (def_p);
tree tracked_var = target_for_debug_bind (sym);
if (tracked_var)
{
- gimple note = gimple_build_debug_bind (tracked_var, def, stmt);
+ gimple *note = gimple_build_debug_bind (tracked_var, def, stmt);
/* If stmt ends the bb, insert the debug stmt on the single
non-EH edge from the stmt. */
if (gsi_one_before_end_p (gsi) && stmt_ends_bb_p (stmt))
in OLD_SSA_NAMES. Returns whether STMT should be removed. */
static bool
-rewrite_update_stmt (gimple stmt, gimple_stmt_iterator gsi)
+rewrite_update_stmt (gimple *stmt, gimple_stmt_iterator gsi)
{
use_operand_p use_p;
def_operand_p def_p;
locus = UNKNOWN_LOCATION;
else
{
- gimple stmt = SSA_NAME_DEF_STMT (reaching_def);
+ gimple *stmt = SSA_NAME_DEF_STMT (reaching_def);
gphi *other_phi = dyn_cast <gphi *> (stmt);
/* Single element PHI nodes behave like copies, so get the
renamer. BLOCKS is the set of blocks that need updating. */
static void
-mark_def_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
+mark_def_interesting (tree var, gimple *stmt, basic_block bb,
+ bool insert_phi_p)
{
gcc_checking_assert (bitmap_bit_p (blocks_to_update, bb->index));
set_register_defs (stmt, true);
nodes. */
static inline void
-mark_use_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
+mark_use_interesting (tree var, gimple *stmt, basic_block bb,
+ bool insert_phi_p)
{
basic_block def_bb = gimple_bb (stmt);
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt;
+ gimple *stmt;
ssa_op_iter i;
use_operand_p use_p;
def_operand_p def_p;
FOR_EACH_IMM_USE_FAST (use_p, iter, name)
{
- gimple stmt = USE_STMT (use_p);
+ gimple *stmt = USE_STMT (use_p);
basic_block bb = gimple_bb (stmt);
if (gimple_code (stmt) == GIMPLE_PHI)
static void
prepare_def_site_for (tree name, bool insert_phi_p)
{
- gimple stmt;
+ gimple *stmt;
basic_block bb;
gcc_checking_assert (names_to_release == NULL
update_ssa's tables. */
tree
-create_new_def_for (tree old_name, gimple stmt, def_operand_p def)
+create_new_def_for (tree old_name, gimple *stmt, def_operand_p def)
{
tree new_name;
bool used = false;
imm_use_iterator iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
gcc_assert (VAR_DECL_IS_VIRTUAL_OPERAND (name_var));
FOR_EACH_IMM_USE_STMT (stmt, iter, name)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
ssa_op_iter i;
use_operand_p use_p;
extern tree get_current_def (tree);
extern void set_current_def (tree, tree);
void delete_update_ssa (void);
-tree create_new_def_for (tree, gimple, def_operand_p);
+tree create_new_def_for (tree, gimple *, def_operand_p);
void mark_virtual_operands_for_renaming (struct function *);
void mark_virtual_operand_for_renaming (tree);
void mark_virtual_phi_result_for_renaming (gphi *);
typedef struct rdg_vertex
{
/* The statement represented by this vertex. */
- gimple stmt;
+ gimple *stmt;
/* Vector of data-references in this statement. */
vec<data_reference_p> datarefs;
/* Returns the index of STMT in RDG. */
static int
-rdg_vertex_for_stmt (struct graph *rdg ATTRIBUTE_UNUSED, gimple stmt)
+rdg_vertex_for_stmt (struct graph *rdg ATTRIBUTE_UNUSED, gimple *stmt)
{
int index = gimple_uid (stmt);
gcc_checking_assert (index == -1 || RDG_STMT (rdg, index) == stmt);
0, edge_n, bi)
{
basic_block cond_bb = cd->get_edge (edge_n)->src;
- gimple stmt = last_stmt (cond_bb);
+ gimple *stmt = last_stmt (cond_bb);
if (stmt && is_ctrl_stmt (stmt))
{
struct graph_edge *e;
for (i = 0; i < rdg->n_vertices; i++)
{
- gimple stmt = RDG_STMT (rdg, i);
+ gimple *stmt = RDG_STMT (rdg, i);
if (gimple_code (stmt) == GIMPLE_PHI)
{
edge_iterator ei;
if that failed. */
static bool
-create_rdg_vertices (struct graph *rdg, vec<gimple> stmts, loop_p loop,
+create_rdg_vertices (struct graph *rdg, vec<gimple *> stmts, loop_p loop,
vec<data_reference_p> *datarefs)
{
int i;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_VEC_ELT (stmts, i, stmt)
{
identifying statements in loop copies. */
static void
-stmts_from_loop (struct loop *loop, vec<gimple> *stmts)
+stmts_from_loop (struct loop *loop, vec<gimple *> *stmts)
{
unsigned int i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL && !is_gimple_debug (stmt))
stmts->safe_push (stmt);
}
vec<data_reference_p> datarefs;
/* Create the RDG vertices from the stmts of the loop nest. */
- auto_vec<gimple, 10> stmts;
+ auto_vec<gimple *, 10> stmts;
stmts_from_loop (loop_nest[0], &stmts);
rdg = new_graph (stmts.length ());
datarefs.create (10);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (!is_gimple_debug (use_stmt)
&& loop != loop_containing_stmt (use_stmt))
return true;
loop LOOP. */
static bool
-stmt_has_scalar_dependences_outside_loop (loop_p loop, gimple stmt)
+stmt_has_scalar_dependences_outside_loop (loop_p loop, gimple *stmt)
{
def_operand_p def_p;
ssa_op_iter op_iter;
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
&& !is_gimple_debug (stmt)
&& !bitmap_bit_p (partition->stmts, gimple_uid (stmt)))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
&& !is_gimple_debug (stmt)
&& !bitmap_bit_p (partition->stmts, gimple_uid (stmt)))
generate_memset_builtin (struct loop *loop, partition_t partition)
{
gimple_stmt_iterator gsi;
- gimple stmt, fn_call;
+ gimple *stmt, *fn_call;
tree mem, fn, nb_bytes;
location_t loc;
tree val;
else if (!useless_type_conversion_p (integer_type_node, TREE_TYPE (val)))
{
tree tem = make_ssa_name (integer_type_node);
- gimple cstmt = gimple_build_assign (tem, NOP_EXPR, val);
+ gimple *cstmt = gimple_build_assign (tem, NOP_EXPR, val);
gsi_insert_after (&gsi, cstmt, GSI_CONTINUE_LINKING);
val = tem;
}
generate_memcpy_builtin (struct loop *loop, partition_t partition)
{
gimple_stmt_iterator gsi;
- gimple stmt, fn_call;
+ gimple *stmt, *fn_call;
tree dest, src, fn, nb_bytes;
location_t loc;
enum built_in_function kind;
for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree vdef = gimple_vdef (stmt);
if (vdef && TREE_CODE (vdef) == SSA_NAME)
mark_virtual_operand_for_renaming (vdef);
EXECUTE_IF_SET_IN_BITMAP (partition->stmts, 0, i, bi)
{
- gimple stmt = RDG_STMT (rdg, i);
+ gimple *stmt = RDG_STMT (rdg, i);
if (gimple_has_volatile_ops (stmt))
volatiles_p = true;
single_store = NULL;
EXECUTE_IF_SET_IN_BITMAP (partition->stmts, 0, i, bi)
{
- gimple stmt = RDG_STMT (rdg, i);
+ gimple *stmt = RDG_STMT (rdg, i);
data_reference_p dr;
unsigned j;
if (single_store && !single_load)
{
- gimple stmt = DR_STMT (single_store);
+ gimple *stmt = DR_STMT (single_store);
tree rhs = gimple_assign_rhs1 (stmt);
if (const_with_all_bytes_same (rhs) == -1
&& (!INTEGRAL_TYPE_P (TREE_TYPE (rhs))
}
else if (single_store && single_load)
{
- gimple store = DR_STMT (single_store);
- gimple load = DR_STMT (single_load);
+ gimple *store = DR_STMT (single_store);
+ gimple *load = DR_STMT (single_load);
/* Direct aggregate copy or via an SSA name temporary. */
if (load != store
&& gimple_assign_lhs (load) != gimple_assign_rhs1 (store))
static void
rdg_build_partitions (struct graph *rdg,
- vec<gimple> starting_stmts,
+ vec<gimple *> starting_stmts,
vec<partition_t> *partitions)
{
bitmap processed = BITMAP_ALLOC (NULL);
int i;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_VEC_ELT (starting_stmts, i, stmt)
{
Returns the number of distributed loops. */
static int
-distribute_loop (struct loop *loop, vec<gimple> stmts,
+distribute_loop (struct loop *loop, vec<gimple *> stmts,
control_dependences *cd, int *nb_calls)
{
struct graph *rdg;
walking to innermost loops. */
FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
{
- auto_vec<gimple> work_list;
+ auto_vec<gimple *> work_list;
basic_block *bbs;
int num = loop->num;
unsigned int i;
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* If there is a stmt with side-effects bail out - we
cannot and should not distribute this loop. */
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
tree t;
- gimple stmt;
+ gimple *stmt;
t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
stmt = gimple_build_assign (t, exp);
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
tree t;
- gimple stmt;
+ gimple *stmt;
t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
stmt = gimple_build_assign (exp, t);
struct nesting_info *info = (struct nesting_info *) wi->info;
tree save_local_var_chain;
bitmap save_suppress;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
struct nesting_info *info = (struct nesting_info *) wi->info;
tree save_local_var_chain;
bitmap save_suppress;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
tree label, new_label, target_context, x, field;
gcall *call;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) != GIMPLE_GOTO)
{
gsi_prev (&tmp_gsi);
if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
{
- gimple stmt = gimple_build_goto (label);
+ gimple *stmt = gimple_build_goto (label);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
}
struct walk_stmt_info *wi)
{
struct nesting_info *info = (struct nesting_info *) wi->info;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
tree decl, target_context;
char save_static_chain_added;
int i;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
finalize_nesting_tree_1 (struct nesting_info *root)
{
gimple_seq stmt_list;
- gimple stmt;
+ gimple *stmt;
tree context = root->context;
struct function *sf;
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree ret_val;
if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* If this is a copy from VAR to RESULT, remove it. */
if (gimple_assign_copy_p (stmt)
&& gimple_assign_lhs (stmt) == result
need reexamination later. */
static bool
-plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
+plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (var);
later. */
static bool
-cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
+cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
tree then_, else_;
int object_size_type = osi->object_size_type;
{
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (var);
- gimple stmt;
+ gimple *stmt;
bool reexamine;
if (bitmap_bit_p (computed[object_size_type], varno))
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
unsigned int depth)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
unsigned int varno = SSA_NAME_VERSION (var);
if (osi->depths[varno])
static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
/* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
and looked for a POINTER_PLUS_EXPR in the pass-through
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
tree result;
- gimple call = gsi_stmt (i);
+ gimple *call = gsi_stmt (i);
if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
continue;
enum tree_code code
= object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
tree cst = build_int_cstu (type, bytes);
- gimple g = gimple_build_assign (lhs, code, tem, cst);
+ gimple *g
+ = gimple_build_assign (lhs, code, tem, cst);
gsi_insert_after (&i, g, GSI_NEW_STMT);
update_stmt (call);
}
continue;
/* Propagate into all uses and fold those stmts. */
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
/* Return TRUE if expression STMT is suitable for replacement. */
bool
-ssa_is_replaceable_p (gimple stmt)
+ssa_is_replaceable_p (gimple *stmt)
{
use_operand_p use_p;
tree def;
- gimple use_stmt;
+ gimple *use_stmt;
/* Only consider modify stmts. */
if (!is_gimple_assign (stmt))
{
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_has_location (stmt) || gimple_block (stmt))
SET_USE (arg_p, NULL_TREE);
if (has_zero_uses (arg))
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
stmt = SSA_NAME_DEF_STMT (arg);
bb->aux = NULL;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, i);
i++;
}
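The renumbering above gives every statement in the block its position as a uid. A sketch of what that buys, with a stand-in type: ordering queries become a constant-time integer compare instead of a walk over the block.

struct stmt_stub { int uid; };

/* Valid only for two statements in the same renumbered block.  */
static bool
comes_before_p (const stmt_stub *a, const stmt_stub *b)
{
  return a->uid < b->uid;
}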
{
use_operand_p use;
imm_use_iterator imm_iter;
- gimple defa = SSA_NAME_DEF_STMT (arg);
+ gimple *defa = SSA_NAME_DEF_STMT (arg);
/* If ARG isn't defined in the same block it's too complicated for
our little mind. */
FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
{
- gimple use_stmt = USE_STMT (use);
+ gimple *use_stmt = USE_STMT (use);
if (is_gimple_debug (use_stmt))
continue;
/* Now, if there's a use of RESULT that lies outside this basic block,
{
tree name;
gassign *stmt;
- gimple last = NULL;
+ gimple *last = NULL;
gimple_stmt_iterator gsi2;
gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
/* If TER decided to forward the definition of SSA name EXP this function
returns the defining statement, otherwise NULL. */
-static inline gimple
+static inline gimple *
get_gimple_for_ssa_name (tree exp)
{
int v = SSA_NAME_VERSION (exp);
return NULL;
}
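The lookup above indexes a per-SSA-version table and may return NULL when TER did not forward the definition. A rough standalone sketch of that pattern (the names below are stand-ins, not the real ssaexpand fields):

#include <vector>

struct gimple_stub { };

/* Table mapping SSA version numbers to forwarded definitions.  */
struct ter_table
{
  std::vector<gimple_stub *> values;
};

/* Return the forwarded definition for VERSION, or null if the
   name was not considered replaceable.  */
static gimple_stub *
lookup_def (const ter_table &t, unsigned version)
{
  if (version < t.values.size ())
    return t.values[version];
  return nullptr;
}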
-extern bool ssa_is_replaceable_p (gimple stmt);
+extern bool ssa_is_replaceable_p (gimple *stmt);
extern void finish_out_of_ssa (struct ssaexpand *sa);
extern unsigned int rewrite_out_of_ssa (struct ssaexpand *sa);
extern void expand_phi_nodes (struct ssaexpand *sa);
reduction in the current loop. */
struct reduction_info
{
- gimple reduc_stmt; /* reduction statement. */
- gimple reduc_phi; /* The phi node defining the reduction. */
+ gimple *reduc_stmt; /* reduction statement. */
+ gimple *reduc_phi; /* The phi node defining the reduction. */
enum tree_code reduction_code;/* code for the reduction operation. */
unsigned reduc_version; /* SSA_NAME_VERSION of original reduc_phi
result. */
static struct reduction_info *
-reduction_phi (reduction_info_table_type *reduction_list, gimple phi)
+reduction_phi (reduction_info_table_type *reduction_list, gimple *phi)
{
struct reduction_info tmpred, *red;
}
static tree
-reduc_stmt_res (gimple stmt)
+reduc_stmt_res (gimple *stmt)
{
return (gimple_code (stmt) == GIMPLE_PHI
? gimple_phi_result (stmt)
int_tree_htab_type *decl_address)
{
struct elv_data dta;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
memset (&dta.info, '\0', sizeof (dta.info));
dta.entry = entry;
replacement decls are stored in DECL_COPIES. */
static void
-separate_decls_in_region_stmt (edge entry, edge exit, gimple stmt,
+separate_decls_in_region_stmt (edge entry, edge exit, gimple *stmt,
name_to_copy_table_type *name_copies,
int_tree_htab_type *decl_copies)
{
replacement decls are stored in DECL_COPIES. */
static bool
-separate_decls_in_region_debug (gimple stmt,
+separate_decls_in_region_debug (gimple *stmt,
name_to_copy_table_type *name_copies,
int_tree_htab_type *decl_copies)
{
edge e;
tree t, addr, ref, x;
tree tmp_load, name;
- gimple load;
+ gimple *load;
load_struct = build_simple_mem_ref (clsn_data->load);
t = build3 (COMPONENT_REF, type, load_struct, reduc->field, NULL_TREE);
create_loads_for_reductions (reduction_info **slot, struct clsn_data *clsn_data)
{
struct reduction_info *const red = *slot;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
tree type = TREE_TYPE (reduc_stmt_res (red->reduc_stmt));
tree load_struct;
{
gimple_stmt_iterator gsi;
tree t;
- gimple stmt;
+ gimple *stmt;
gsi = gsi_after_labels (ld_st_data->load_bb);
t = build_fold_addr_expr (ld_st_data->store);
{
struct reduction_info *const red = *slot;
tree t;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
tree type = TREE_TYPE (reduc_stmt_res (red->reduc_stmt));
{
struct name_to_copy_elt *const elt = *slot;
tree t;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
tree type = TREE_TYPE (elt->new_name);
tree load_struct;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
has_debug_stmt = true;
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
{
static void
replace_uses_in_bb_by (tree name, tree val, basic_block bb)
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator imm_iter;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, name)
if (virtual_operand_p (res_z))
continue;
- gimple reduc_phi = SSA_NAME_DEF_STMT (res_c);
+ gimple *reduc_phi = SSA_NAME_DEF_STMT (res_c);
struct reduction_info *red = reduction_phi (reduction_list, reduc_phi);
if (red != NULL)
red->keep_res = nphi;
if (!wi::lts_p (nit_max, type_max))
return false;
- gimple def = SSA_NAME_DEF_STMT (nit);
+ gimple *def = SSA_NAME_DEF_STMT (nit);
/* Try to find nit + 1, in the form of n in an assignment nit = n - 1. */
if (def
basic_block bb, paral_bb, for_bb, ex_bb, continue_bb;
tree t, param;
gomp_parallel *omp_par_stmt;
- gimple omp_return_stmt1, omp_return_stmt2;
- gimple phi;
+ gimple *omp_return_stmt1, *omp_return_stmt2;
+ gimple *phi;
gcond *cond_stmt;
gomp_for *for_stmt;
gomp_continue *omp_cont_stmt;
source_location locus;
gphi *phi = gpi.phi ();
tree def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
- gimple def_stmt = SSA_NAME_DEF_STMT (def);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (def);
/* If the exit phi is not connected to a header phi in the same loop, this
value is not modified in the loop, and we're done with this phi. */
struct clsn_data clsn_data;
unsigned prob;
location_t loc;
- gimple cond_stmt;
+ gimple *cond_stmt;
unsigned int m_p_thread=2;
/* From
static void
build_new_reduction (reduction_info_table_type *reduction_list,
- gimple reduc_stmt, gphi *phi)
+ gimple *reduc_stmt, gphi *phi)
{
reduction_info **slot;
struct reduction_info *new_reduction;
if (gimple_code (reduc_stmt) == GIMPLE_PHI)
{
tree op1 = PHI_ARG_DEF (reduc_stmt, 0);
- gimple def1 = SSA_NAME_DEF_STMT (op1);
+ gimple *def1 = SSA_NAME_DEF_STMT (op1);
reduction_code = gimple_assign_rhs_code (def1);
}
if (simple_iv (loop, loop, res, &iv, true))
continue;
- gimple reduc_stmt
+ gimple *reduc_stmt
= vect_force_simple_reduction (simple_loop_info, phi, true,
&double_reduc, true);
if (!reduc_stmt)
}
use_operand_p use_p;
- gimple inner_stmt;
+ gimple *inner_stmt;
bool single_use_p = single_imm_use (res, &use_p, &inner_stmt);
gcc_assert (single_use_p);
gphi *inner_phi = as_a <gphi *> (inner_stmt);
&iv, true))
continue;
- gimple inner_reduc_stmt
+ gimple *inner_reduc_stmt
= vect_force_simple_reduction (simple_inner_loop_info, inner_phi,
true, &double_reduc, true);
gcc_assert (!double_reduc);
struct reduction_info *red;
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple reduc_phi;
+ gimple *reduc_phi;
tree val = PHI_ARG_DEF_FROM_EDGE (phi, exit);
if (!virtual_operand_p (val))
/* Hook to convert gimple stmt uids into true gimple statements. The second
parameter is an array of statements indexed by their uid. */
- void (*stmt_fixup) (struct cgraph_node *, gimple *);
+ void (*stmt_fixup) (struct cgraph_node *, gimple **);
/* Results of interprocedural propagation of an IPA pass is applied to
function body via this hook. */
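This hunk shows why the mechanical rename sometimes adds a level of indirection: the hook's second parameter is an array of statements indexed by uid, i.e. an array of pointers. Before the patch "gimple" was itself a pointer typedef, so "gimple *" already meant pointer-to-pointer; with gimple naming the class, the same array is spelled "gimple **". A small self-contained illustration (gimple here is a stand-in struct):

struct gimple { int uid; };

typedef gimple *old_gimple;   /* the pre-patch pointer typedef */

/* Both functions take the same argument: an array of statement
   pointers indexed by uid.  Only the spelling differs.  */
static int
first_uid_old (old_gimple *stmts)
{
  return stmts[0]->uid;
}

static int
first_uid_new (gimple **stmts)
{
  return stmts[0]->uid;
}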
void (*read_summary) (void),
void (*write_optimization_summary) (void),
void (*read_optimization_summary) (void),
- void (*stmt_fixup) (struct cgraph_node *, gimple *),
+ void (*stmt_fixup) (struct cgraph_node *, gimple **),
unsigned int function_transform_todo_flags_start,
unsigned int (*function_transform) (struct cgraph_node *),
void (*variable_transform) (varpool_node *))
extern void execute_ipa_pass_list (opt_pass *);
extern void execute_ipa_summary_passes (ipa_opt_pass_d *);
extern void execute_all_ipa_transforms (void);
-extern void execute_all_ipa_stmt_fixups (struct cgraph_node *, gimple *);
+extern void execute_all_ipa_stmt_fixups (struct cgraph_node *, gimple **);
extern bool pass_init_dump_file (opt_pass *);
extern void pass_fini_dump_file (opt_pass *);
the -2 on all the calculations below. */
#define NUM_BUCKETS 10
-static GTY ((deletable (""))) vec<gimple, va_gc> *free_phinodes[NUM_BUCKETS - 2];
+static GTY ((deletable (""))) vec<gimple *, va_gc> *free_phinodes[NUM_BUCKETS - 2];
static unsigned long free_phinode_count;
static int ideal_phi_node_len (int);
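A sketch of the bucketing the "-2" refers to, under the assumption that a PHI's argument capacity is never below two: a node of capacity LEN is filed in bucket LEN - 2, with everything at or above NUM_BUCKETS - 1 lumped into the last bucket.

#include <cstddef>

const size_t NUM_BUCKETS = 10;

/* Map a node capacity to its free-list bucket: capacities run from
   2 upward, so indices are shifted down by 2, and oversized nodes
   share the last bucket.  */
static size_t
bucket_for_capacity (size_t len)
{
  size_t bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
  return bucket - 2;
}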
/* We no longer need PHI, release it so that it may be reused. */
void
-release_phi_node (gimple phi)
+release_phi_node (gimple *phi)
{
size_t bucket;
size_t len = gimple_phi_capacity (phi);
void
remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
{
- gimple phi = gsi_stmt (*gsi);
+ gimple *phi = gsi_stmt (*gsi);
if (release_lhs_p)
insert_debug_temps_for_defs (gsi);
#define GCC_TREE_PHINODES_H
extern void phinodes_print_statistics (void);
-extern void release_phi_node (gimple);
+extern void release_phi_node (gimple *);
extern void reserve_phi_args_for_new_edge (basic_block);
extern void add_phi_node_to_bb (gphi *phi, basic_block bb);
extern gphi *create_phi_node (tree, basic_block);
extern void set_phi_nodes (basic_block, gimple_seq);
static inline use_operand_p
-gimple_phi_arg_imm_use_ptr (gimple gs, int i)
+gimple_phi_arg_imm_use_ptr (gimple *gs, int i)
{
return &gimple_phi_arg (gs, i)->imm_use;
}
{
struct phi_arg_d *element, *root;
size_t index;
- gimple phi;
+ gimple *phi;
/* Since the use is the first thing in a PHI argument element, we can
calculate its index based on casting it to an argument, and performing
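The computation the comment describes, as a self-contained sketch: because the use is at offset zero inside each argument element, a pointer to the use can be cast to a pointer to the element, and subtracting the address of argument 0 yields the index. All names below are stand-ins for phi_arg_d and friends:

#include <cassert>
#include <cstddef>

struct use_stub { void *link; };
struct phi_arg_stub { use_stub use; int def; };  /* use at offset 0 */

static size_t
arg_index_from_use (use_stub *use, phi_arg_stub *arg0)
{
  phi_arg_stub *element = reinterpret_cast<phi_arg_stub *> (use);
  return element - arg0;   /* plain pointer arithmetic */
}

int
main ()
{
  phi_arg_stub args[4] = {};
  assert (arg_index_from_use (&args[2].use, &args[0]) == 2);
  return 0;
}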
struct data_reference *ref;
/* The statement in that the reference appears. */
- gimple stmt;
+ gimple *stmt;
/* In case that STMT is a phi node, this field is set to the SSA name
defined by it in replace_phis_by_defined_names (in order to avoid
{
tree name, init, init_ref;
gphi *phi = NULL;
- gimple init_stmt;
+ gimple *init_stmt;
edge latch = loop_latch_edge (loop);
struct data_reference init_dr;
gphi_iterator psi;
is in the lhs of STMT, false if it is in rhs. */
static void
-replace_ref_with (gimple stmt, tree new_tree, bool set, bool in_lhs)
+replace_ref_with (gimple *stmt, tree new_tree, bool set, bool in_lhs)
{
tree val;
gassign *new_stmt;
the looparound phi nodes contained in one of the chains. If there is no
such statement, or more statements, NULL is returned. */
-static gimple
+static gimple *
single_nonlooparound_use (tree name)
{
use_operand_p use;
imm_use_iterator it;
- gimple stmt, ret = NULL;
+ gimple *stmt, *ret = NULL;
FOR_EACH_IMM_USE_FAST (use, it, name)
{
used. */
static void
-remove_stmt (gimple stmt)
+remove_stmt (gimple *stmt)
{
tree name;
- gimple next;
+ gimple *next;
gimple_stmt_iterator psi;
if (gimple_code (stmt) == GIMPLE_PHI)
static void
base_names_in_chain_on (struct loop *loop, tree name, tree var)
{
- gimple stmt, phi;
+ gimple *stmt, *phi;
imm_use_iterator iter;
replace_ssa_name_symbol (name, var);
{
edge e;
gphi *phi;
- gimple stmt;
+ gimple *stmt;
tree name, use, var;
gphi_iterator psi;
statements, NAME is replaced with the actual name used in the returned
statement. */
-static gimple
+static gimple *
find_use_stmt (tree *name)
{
- gimple stmt;
+ gimple *stmt;
tree rhs, lhs;
/* Skip over assignments. */
tree of the same operations and returns its root. Distance to the root
is stored in DISTANCE. */
-static gimple
-find_associative_operation_root (gimple stmt, unsigned *distance)
+static gimple *
+find_associative_operation_root (gimple *stmt, unsigned *distance)
{
tree lhs;
- gimple next;
+ gimple *next;
enum tree_code code = gimple_assign_rhs_code (stmt);
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
unsigned dist = 0;
tree formed by this operation instead of the statement that uses NAME1 or
NAME2. */
-static gimple
+static gimple *
find_common_use_stmt (tree *name1, tree *name2)
{
- gimple stmt1, stmt2;
+ gimple *stmt1, *stmt2;
stmt1 = find_use_stmt (name1);
if (!stmt1)
bool aswap;
tree atype;
tree name1, name2;
- gimple stmt;
+ gimple *stmt;
name1 = name_for_ref (r1);
name2 = name_for_ref (r2);
an assignment of the remaining operand. */
static void
-remove_name_from_operation (gimple stmt, tree op)
+remove_name_from_operation (gimple *stmt, tree op)
{
tree other_op;
gimple_stmt_iterator si;
/* Reassociates the expression in that NAME1 and NAME2 are used so that they
are combined in a single statement, and returns this statement. */
-static gimple
+static gimple *
reassociate_to_the_same_stmt (tree name1, tree name2)
{
- gimple stmt1, stmt2, root1, root2, s1, s2;
+ gimple *stmt1, *stmt2, *root1, *root2, *s1, *s2;
gassign *new_stmt, *tmp_stmt;
tree new_name, tmp_name, var, r1, r2;
unsigned dist1, dist2;
associative and commutative operation in the same expression, reassociate
the expression so that they are used in the same statement. */
-static gimple
+static gimple *
stmt_combining_refs (dref r1, dref r2)
{
- gimple stmt1, stmt2;
+ gimple *stmt1, *stmt2;
tree name1 = name_for_ref (r1);
tree name2 = name_for_ref (r2);
bool swap = false;
chain_p new_chain;
unsigned i;
- gimple root_stmt;
+ gimple *root_stmt;
tree rslt_type = NULL_TREE;
if (ch1 == ch2)
void
gimple_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref = tree_coverage_counter_ref (tag, base), ref_ptr;
gcall *call;
void
gimple_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
void
gimple_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
{
tree tmp1;
gassign *stmt1, *stmt2, *stmt3;
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
void
gimple_gen_average_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
void
gimple_gen_ior_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt))
update_stmt (stmt);
}
if (TREE_CODE (chrec) == SSA_NAME)
{
- gimple def;
+ gimple *def;
loop_p def_loop, loop;
if (SSA_NAME_IS_DEFAULT_DEF (chrec))
/* Return true when PHI is a loop-phi-node. */
static bool
-loop_phi_node_p (gimple phi)
+loop_phi_node_p (gimple *phi)
{
/* The implementation of this function is based on the following
property: "all the loop-phi-nodes of a loop are contained in the
static tree
add_to_evolution_1 (unsigned loop_nb, tree chrec_before, tree to_add,
- gimple at_stmt)
+ gimple *at_stmt)
{
tree type, left, right;
struct loop *loop = get_loop (cfun, loop_nb), *chloop;
static tree
add_to_evolution (unsigned loop_nb, tree chrec_before, enum tree_code code,
- tree to_add, gimple at_stmt)
+ tree to_add, gimple *at_stmt)
{
tree type = chrec_type (to_add);
tree res = NULL_TREE;
if (exit_edge)
{
- gimple stmt;
+ gimple *stmt;
stmt = last_stmt (exit_edge->src);
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
};
-static t_bool follow_ssa_edge (struct loop *loop, gimple, gphi *,
+static t_bool follow_ssa_edge (struct loop *loop, gimple *, gphi *,
tree *, int);
/* Follow the ssa edge into the binary expression RHS0 CODE RHS1.
Return true if the strongly connected component has been found. */
static t_bool
-follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
+follow_ssa_edge_binary (struct loop *loop, gimple *at_stmt,
tree type, tree rhs0, enum tree_code code, tree rhs1,
gphi *halting_phi, tree *evolution_of_loop,
int limit)
Return true if the strongly connected component has been found. */
static t_bool
-follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
+follow_ssa_edge_expr (struct loop *loop, gimple *at_stmt, tree expr,
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
Return true if the strongly connected component has been found. */
static t_bool
-follow_ssa_edge_in_rhs (struct loop *loop, gimple stmt,
+follow_ssa_edge_in_rhs (struct loop *loop, gimple *stmt,
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
path that is analyzed on the return walk. */
static t_bool
-follow_ssa_edge (struct loop *loop, gimple def, gphi *halting_phi,
+follow_ssa_edge (struct loop *loop, gimple *def, gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
struct loop *def_loop;
for (i = 0; i < n; i++)
{
tree arg = PHI_ARG_DEF (loop_phi_node, i);
- gimple ssa_chain;
+ gimple *ssa_chain;
tree ev_fn;
t_bool res;
Handle degenerate PHIs here to not miss important unrollings. */
if (TREE_CODE (init_cond) == SSA_NAME)
{
- gimple def = SSA_NAME_DEF_STMT (init_cond);
+ gimple *def = SSA_NAME_DEF_STMT (init_cond);
if (gphi *phi = dyn_cast <gphi *> (def))
{
tree res = degenerate_phi_result (phi);
analyze the effect of an inner loop: see interpret_loop_phi. */
static tree
-interpret_rhs_expr (struct loop *loop, gimple at_stmt,
+interpret_rhs_expr (struct loop *loop, gimple *at_stmt,
tree type, tree rhs1, enum tree_code code, tree rhs2)
{
tree res, chrec1, chrec2;
- gimple def;
+ gimple *def;
if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
{
/* Interpret the expression EXPR. */
static tree
-interpret_expr (struct loop *loop, gimple at_stmt, tree expr)
+interpret_expr (struct loop *loop, gimple *at_stmt, tree expr)
{
enum tree_code code;
tree type = TREE_TYPE (expr), op0, op1;
/* Interpret the rhs of the assignment STMT. */
static tree
-interpret_gimple_assign (struct loop *loop, gimple stmt)
+interpret_gimple_assign (struct loop *loop, gimple *stmt)
{
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
enum tree_code code = gimple_assign_rhs_code (stmt);
analyze_scalar_evolution_1 (struct loop *loop, tree var, tree res)
{
tree type = TREE_TYPE (var);
- gimple def;
+ gimple *def;
basic_block bb;
struct loop *def_loop;
gsi2 = gsi_start (stmts);
while (!gsi_end_p (gsi2))
{
- gimple stmt = gsi_stmt (gsi2);
+ gimple *stmt = gsi_stmt (gsi2);
gimple_stmt_iterator gsi3 = gsi2;
gsi_next (&gsi2);
gsi_remove (&gsi3, false);
tree type;
/* The statement this access belongs to. */
- gimple stmt;
+ gimple *stmt;
/* Next group representative for this aggregate. */
struct access *next_grp;
final. */
static void
-mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
+mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
int idx, parm_index = 0;
not possible. */
static struct access *
-create_access (tree expr, gimple stmt, bool write)
+create_access (tree expr, gimple *stmt, bool write)
{
struct access *access;
HOST_WIDE_INT offset, size, max_size;
created. */
static struct access *
-build_access_from_expr_1 (tree expr, gimple stmt, bool write)
+build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
struct access *ret = NULL;
bool partial_ref;
true if the expression is a store and false otherwise. */
static bool
-build_access_from_expr (tree expr, gimple stmt, bool write)
+build_access_from_expr (tree expr, gimple *stmt, bool write)
{
struct access *access;
NULL, in that case ignore it. */
static bool
-disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
+disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
&& stmt_ends_bb_p (stmt))
true if any access has been inserted. */
static bool
-build_accesses_from_assign (gimple stmt)
+build_accesses_from_assign (gimple *stmt)
{
tree lhs, rhs;
struct access *lacc, *racc;
GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
static bool
-asm_visit_addr (gimple, tree op, tree, void *)
+asm_visit_addr (gimple *, tree op, tree, void *)
{
op = get_base_address (op);
if (op
that their types match. */
static inline bool
-callsite_arguments_match_p (gimple call)
+callsite_arguments_match_p (gimple *call)
{
if (gimple_call_num_args (call) < (unsigned) func_param_count)
return false;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree t;
unsigned i;
tree rep = get_access_replacement (access);
tree clobber = build_constructor (access->type, NULL);
TREE_THIS_VOLATILE (clobber) = 1;
- gimple stmt = gimple_build_assign (rep, clobber);
+ gimple *stmt = gimple_build_assign (rep, clobber);
if (insert_after)
gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
the same values as sra_modify_assign. */
static enum assignment_mod_result
-sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
+sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
tree lhs = gimple_assign_lhs (stmt);
struct access *acc = get_access_for_expr (lhs);
copying. */
static enum assignment_mod_result
-sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
+sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
struct access *lacc, *racc;
tree lhs, rhs;
gimple_stmt_iterator gsi = gsi_start_bb (bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
enum assignment_mod_result assign_result;
bool modified = false, deleted = false;
tree *t;
ptr_parm_has_direct_uses (tree parm)
{
imm_use_iterator ui;
- gimple stmt;
+ gimple *stmt;
tree name = ssa_default_def (cfun, parm);
bool ret = false;
ADJUSTMENTS is a pointer to an adjustments vector. */
static bool
-replace_removed_params_ssa_names (gimple stmt,
+replace_removed_params_ssa_names (gimple *stmt,
ipa_parm_adjustment_vec adjustments)
{
struct ipa_parm_adjustment *adj;
point to the statement). Return true iff the statement was modified. */
static bool
-sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
+sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
ipa_parm_adjustment_vec adjustments)
{
tree *lhs_p, *rhs_p;
gsi = gsi_start_bb (bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool modified = false;
tree *t;
unsigned i;
{
struct ipa_parm_adjustment *adj;
imm_use_iterator ui;
- gimple stmt;
+ gimple *stmt;
gdebug *def_temp;
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
iscc->has_thunk = true;
return true;
}
- gimple call_stmt = cs->call_stmt;
+ gimple *call_stmt = cs->call_stmt;
unsigned count = gimple_call_num_args (call_stmt);
for (unsigned i = 0; i < count; i++)
{
/* Return true whether STMT may clobber global memory. */
bool
-stmt_may_clobber_global_p (gimple stmt)
+stmt_may_clobber_global_p (gimple *stmt)
{
tree lhs;
ref->ref = NULL_TREE;
if (TREE_CODE (ptr) == SSA_NAME)
{
- gimple stmt = SSA_NAME_DEF_STMT (ptr);
+ gimple *stmt = SSA_NAME_DEF_STMT (ptr);
if (gimple_assign_single_p (stmt)
&& gimple_assign_rhs_code (stmt) == ADDR_EXPR)
ptr = gimple_assign_rhs1 (stmt);
true, otherwise return false. */
bool
-ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
{
if (is_gimple_assign (stmt))
{
}
bool
-ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
otherwise return false. */
bool
-stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
+stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
{
if (is_gimple_call (stmt))
{
}
bool
-stmt_may_clobber_ref_p (gimple stmt, tree ref)
+stmt_may_clobber_ref_p (gimple *stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
return false. */
bool
-stmt_kills_ref_p (gimple stmt, ao_ref *ref)
+stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
if (!ao_ref_base (ref))
return false;
}
bool
-stmt_kills_ref_p (gimple stmt, tree ref)
+stmt_kills_ref_p (gimple *stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
case false is returned. The walk starts with VUSE, one argument of PHI. */
static bool
-maybe_skip_until (gimple phi, tree target, ao_ref *ref,
+maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
tree vuse, unsigned int *cnt, bitmap *visited,
bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool),
/* Walk until we hit the target. */
while (vuse != target)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
/* Recurse for PHI nodes. */
if (gimple_code (def_stmt) == GIMPLE_PHI)
{
Return that, or NULL_TREE if there is no such definition. */
static tree
-get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
+get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
ao_ref *ref, unsigned int *cnt,
bitmap *visited, bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool),
void *data)
{
- gimple def0 = SSA_NAME_DEF_STMT (arg0);
- gimple def1 = SSA_NAME_DEF_STMT (arg1);
+ gimple *def0 = SSA_NAME_DEF_STMT (arg0);
+ gimple *def1 = SSA_NAME_DEF_STMT (arg1);
tree common_vuse;
if (arg0 == arg1)
Returns NULL_TREE if no suitable virtual operand can be found. */
tree
-get_continuation_for_phi (gimple phi, ao_ref *ref,
+get_continuation_for_phi (gimple *phi, ao_ref *ref,
unsigned int *cnt, bitmap *visited,
bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool),
do
{
- gimple def_stmt;
+ gimple *def_stmt;
/* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
res = (*walker) (ref, vuse, cnt, data);
{
do
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
if (*visited
&& !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
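Both walkers above guard against revisiting a virtual definition by entering its SSA version into a bitmap first. A minimal sketch of that guard, with std::set standing in for the bitmap and a plain pointer chain standing in for the vdef links:

#include <set>

struct vdef_stub { vdef_stub *older; };  /* next older definition */

/* Walk the chain from DEF, aborting if a definition repeats.  */
static bool
walk_vdefs (vdef_stub *def, std::set<vdef_stub *> &visited)
{
  while (def)
    {
      if (!visited.insert (def).second)
        return false;   /* already seen: stop this branch */
      def = def->older;
    }
  return true;
}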
extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool);
extern bool refs_anti_dependent_p (tree, tree);
extern bool refs_output_dependent_p (tree, tree);
-extern bool ref_maybe_used_by_stmt_p (gimple, tree);
-extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *);
-extern bool stmt_may_clobber_global_p (gimple);
-extern bool stmt_may_clobber_ref_p (gimple, tree);
-extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
+extern bool ref_maybe_used_by_stmt_p (gimple *, tree);
+extern bool ref_maybe_used_by_stmt_p (gimple *, ao_ref *);
+extern bool stmt_may_clobber_global_p (gimple *);
+extern bool stmt_may_clobber_ref_p (gimple *, tree);
+extern bool stmt_may_clobber_ref_p_1 (gimple *, ao_ref *);
extern bool call_may_clobber_ref_p (gcall *, tree);
extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
-extern bool stmt_kills_ref_p (gimple, tree);
-extern bool stmt_kills_ref_p (gimple, ao_ref *);
-extern tree get_continuation_for_phi (gimple, ao_ref *,
+extern bool stmt_kills_ref_p (gimple *, tree);
+extern bool stmt_kills_ref_p (gimple *, ao_ref *);
+extern tree get_continuation_for_phi (gimple *, ao_ref *,
unsigned int *, bitmap *, bool,
void *(*)(ao_ref *, tree, void *, bool),
void *);
get_default_value (tree var)
{
ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
- gimple stmt;
+ gimple *stmt;
stmt = SSA_NAME_DEF_STMT (var);
Else return VARYING. */
static ccp_lattice_t
-likely_value (gimple stmt)
+likely_value (gimple *stmt)
{
bool has_constant_operand, has_undefined_operand, all_undefined_operands;
bool has_nsa_operand;
/* Returns true if STMT cannot be constant. */
static bool
-surely_varying_stmt_p (gimple stmt)
+surely_varying_stmt_p (gimple *stmt)
{
/* If the statement has operands that we cannot handle, it cannot be
constant. */
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
bool is_varying;
/* If the statement is a control insn, then we do not
/* If the definition may be simulated again we cannot follow
this SSA edge as the SSA propagator does not necessarily
re-visit the use. */
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!gimple_nop_p (def_stmt)
&& prop_simulate_again_p (def_stmt))
return NULL_TREE;
otherwise return the original RHS or NULL_TREE. */
static tree
-ccp_fold (gimple stmt)
+ccp_fold (gimple *stmt)
{
location_t loc = gimple_location (stmt);
switch (gimple_code (stmt))
ALLOC_ALIGNED is true. */
static ccp_prop_value_t
-bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
+bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
bool alloc_aligned)
{
tree align, misalign = NULL_TREE, type;
Valid only for assignments, calls, conditionals, and switches. */
static ccp_prop_value_t
-evaluate_stmt (gimple stmt)
+evaluate_stmt (gimple *stmt)
{
ccp_prop_value_t val;
tree simplified = NULL_TREE;
return val;
}
-typedef hash_table<nofree_ptr_hash<gimple_statement_base> > gimple_htab;
+typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
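The template argument of nofree_ptr_hash names the pointed-to type, so the element type of gimple_htab is gimple *; the rename just substitutes the class's new name for gimple_statement_base. A rough standard-library analogy (an assumption for illustration, not the real hash_table API):

#include <unordered_set>

struct gimple_stub { };

/* Keys are statement pointers; the table does not own them.  */
typedef std::unordered_set<gimple_stub *> gimple_htab_like;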
/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
insert_clobber_before_stack_restore (tree saved_val, tree var,
gimple_htab **visited)
{
- gimple stmt;
+ gimple *stmt;
gassign *clobber_stmt;
tree clobber;
imm_use_iterator iter;
gimple_stmt_iterator i;
- gimple *slot;
+ gimple **slot;
FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
- gimple stmt;
+ gimple *stmt;
tree saved_val;
gimple_htab *visited = NULL;
NULL_TREE. */
static tree
-fold_builtin_alloca_with_align (gimple stmt)
+fold_builtin_alloca_with_align (gimple *stmt)
{
unsigned HOST_WIDE_INT size, threshold, n_elem;
tree lhs, arg, block, var, elem_type, array_type;
static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
are handled here. */
static enum ssa_prop_result
-visit_assignment (gimple stmt, tree *output_p)
+visit_assignment (gimple *stmt, tree *output_p)
{
ccp_prop_value_t val;
enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
SSA_PROP_VARYING. */
static enum ssa_prop_result
-visit_cond_stmt (gimple stmt, edge *taken_edge_p)
+visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
ccp_prop_value_t val;
basic_block block;
value, return SSA_PROP_VARYING. */
static enum ssa_prop_result
-ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
+ccp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
{
tree def;
ssa_op_iter iter;
optimize_stack_restore (gimple_stmt_iterator i)
{
tree callee;
- gimple stmt;
+ gimple *stmt;
basic_block bb = gsi_bb (i);
- gimple call = gsi_stmt (i);
+ gimple *call = gsi_stmt (i);
if (gimple_code (call) != GIMPLE_CALL
|| gimple_call_num_args (call) != 1
or not is irrelevant to removing the call to __builtin_stack_restore. */
if (has_single_use (gimple_call_arg (call, 0)))
{
- gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
+ gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
if (is_gimple_call (stack_save))
{
callee = gimple_call_fndecl (stack_save);
pointer assignment. */
static tree
-optimize_stdarg_builtin (gimple call)
+optimize_stdarg_builtin (gimple *call)
{
tree callee, lhs, rhs, cfun_va_list;
bool va_list_simple_ptr;
{
basic_block bb = gsi_bb (i);
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
edge_iterator ei;
edge e;
bool ret;
gimple_stmt_iterator i;
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
- gimple stmt, old_stmt;
+ gimple *stmt, *old_stmt;
tree callee;
enum built_in_function fcode;
gsi_prev (&gsi))
{
tree var;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* A copy between 2 partitions does not introduce an interference
by itself. If they did, you would never be able to coalesce
gimple_stmt_iterator gsi;
basic_block bb;
tree var;
- gimple stmt;
+ gimple *stmt;
tree first;
var_map map;
ssa_op_iter iter;
/* Return true if this statement may generate a useful copy. */
static bool
-stmt_may_generate_copy (gimple stmt)
+stmt_may_generate_copy (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt));
value and store the LHS into *RESULT_P. */
static enum ssa_prop_result
-copy_prop_visit_assignment (gimple stmt, tree *result_p)
+copy_prop_visit_assignment (gimple *stmt, tree *result_p)
{
tree lhs, rhs;
SSA_PROP_VARYING. */
static enum ssa_prop_result
-copy_prop_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
+copy_prop_visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
enum ssa_prop_result retval = SSA_PROP_VARYING;
location_t loc = gimple_location (stmt);
SSA_PROP_VARYING. */
static enum ssa_prop_result
-copy_prop_visit_stmt (gimple stmt, edge *taken_edge_p, tree *result_p)
+copy_prop_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *result_p)
{
enum ssa_prop_result retval;
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
ssa_op_iter iter;
tree def;
#define STMT_NECESSARY GF_PLF_1
-static vec<gimple> worklist;
+static vec<gimple *> worklist;
/* Vector indicating an SSA name has already been processed and marked
as necessary. */
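A compact sketch of how these two pieces cooperate in the mark phase: a statement proven necessary is flagged (the flag below stands in for the STMT_NECESSARY pass-local flag) and pushed at most once; draining the worklist then marks the defining statements of its operands. Names are stand-ins:

#include <vector>

struct stmt_stub
{
  bool necessary = false;
  std::vector<stmt_stub *> operand_defs;
};

static std::vector<stmt_stub *> worklist;

static void
mark_necessary (stmt_stub *s)
{
  if (s->necessary)
    return;   /* flagged already, never pushed twice */
  s->necessary = true;
  worklist.push_back (s);
}

static void
propagate_necessity ()
{
  while (!worklist.empty ())
    {
      stmt_stub *s = worklist.back ();
      worklist.pop_back ();
      for (stmt_stub *def : s->operand_defs)
        mark_necessary (def);
    }
}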
worklist if ADD_TO_WORKLIST is true. */
static inline void
-mark_stmt_necessary (gimple stmt, bool add_to_worklist)
+mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
gcc_assert (stmt);
static inline void
mark_operand_necessary (tree op)
{
- gimple stmt;
+ gimple *stmt;
int ver;
gcc_assert (op);
necessary. */
static void
-mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
+mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
/* With non-call exceptions, we have to assume that all statements could
throw. If a statement could throw, it can be deemed necessary. */
static void
mark_last_stmt_necessary (basic_block bb)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
bitmap_set_bit (last_stmt_necessary, bb->index);
bitmap_set_bit (bb_contains_live_stmts, bb->index);
basic_block bb;
gimple_stmt_iterator gsi;
edge e;
- gimple phi, stmt;
+ gimple *phi, *stmt;
int flags;
FOR_EACH_BB_FN (bb, cfun)
static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
/* All stmts we visit are necessary. */
mark_operand_necessary (vdef);
}
static void
-mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
+mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
unsigned int chain;
ao_ref refd;
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
tree vdef, void *data ATTRIBUTE_UNUSED)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
/* We have to skip already visited (and thus necessary) statements
to make the chaining work after we dropped back to simple mode. */
}
static void
-mark_all_reaching_defs_necessary (gimple stmt)
+mark_all_reaching_defs_necessary (gimple *stmt)
{
walk_aliased_vdefs (NULL, gimple_vuse (stmt),
mark_all_reaching_defs_necessary_1, NULL, &visited);
/* Return true for PHI nodes with one or identical arguments
can be removed. */
static bool
-degenerate_phi_p (gimple phi)
+degenerate_phi_p (gimple *phi)
{
unsigned int i;
tree op = gimple_phi_arg_def (phi, 0);
static void
propagate_necessity (bool aggressive)
{
- gimple stmt;
+ gimple *stmt;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nProcessing worklist:\n");
if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
{
tree ptr = gimple_call_arg (stmt, 0);
- gimple def_stmt;
+ gimple *def_stmt;
tree def_callee;
/* If the pointer we free is defined by an allocation
function do not add the call to the worklist. */
|| DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
|| DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
{
- gimple bounds_def_stmt;
+ gimple *bounds_def_stmt;
tree bounds;
/* For instrumented calls we should also check used
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
SET_USE (use_p, vuse);
static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
- gimple stmt = gsi_stmt (*i);
+ gimple *stmt = gsi_stmt (*i);
if (dump_file && (dump_flags & TDF_DETAILS))
{
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
enum tree_code subcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
bool has_other_uses = false;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
has_debug_uses = true;
else if (is_gimple_assign (use_stmt)
if (has_debug_uses)
{
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
{
if (!gimple_debug_bind_p (use_stmt))
bool something_changed = false;
basic_block bb;
gimple_stmt_iterator gsi, psi;
- gimple stmt;
+ gimple *stmt;
tree call;
vec<basic_block> h;
tree ptr = gimple_call_arg (stmt, 0);
if (TREE_CODE (ptr) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
if (!gimple_nop_p (def_stmt)
&& !gimple_plf (def_stmt, STMT_NECESSARY))
gimple_set_plf (stmt, STMT_NECESSARY, false);
call is not removed. */
if (gimple_call_with_bounds_p (stmt))
{
- gimple bounds_def_stmt;
+ gimple *bounds_def_stmt;
tree bounds = gimple_call_arg (stmt, 1);
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
/* Bitmap of blocks that have had EH statements cleaned. We should
remove their dead edges eventually. */
static bitmap need_eh_cleanup;
-static vec<gimple> need_noreturn_fixup;
+static vec<gimple *> need_noreturn_fixup;
/* Statistics for dominator optimizations. */
struct opt_stats_d
static void optimize_stmt (basic_block, gimple_stmt_iterator,
class const_and_copies *,
class avail_exprs_stack *);
-static tree lookup_avail_expr (gimple, bool, class avail_exprs_stack *);
+static tree lookup_avail_expr (gimple *, bool, class avail_exprs_stack *);
static void record_cond (cond_equivalence *, class avail_exprs_stack *);
static void record_equality (tree, tree, class const_and_copies *);
static void record_equivalences_from_phis (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *,
class const_and_copies *,
class avail_exprs_stack *);
-static void record_equivalences_from_stmt (gimple, int,
+static void record_equivalences_from_stmt (gimple *, int,
class avail_exprs_stack *);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void dump_dominator_optimization_stats (FILE *file,
if (! gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
if (gimple_code (stmt) == GIMPLE_SWITCH)
now noreturn call first. */
while (!need_noreturn_fixup.is_empty ())
{
- gimple stmt = need_noreturn_fixup.pop ();
+ gimple *stmt = need_noreturn_fixup.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Fixing up noreturn call ");
/* A trivial wrapper so that we can present the generic jump
threading code with a simple API for simplifying statements. */
static tree
-simplify_stmt_for_jump_threading (gimple stmt,
- gimple within_stmt ATTRIBUTE_UNUSED,
+simplify_stmt_for_jump_threading (gimple *stmt,
+ gimple *within_stmt ATTRIBUTE_UNUSED,
class avail_exprs_stack *avail_exprs_stack)
{
return lookup_avail_expr (stmt, false, avail_exprs_stack);
&& TREE_CODE (lhs) == SSA_NAME
&& TREE_CODE (rhs) == INTEGER_CST)
{
- gimple defstmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *defstmt = SSA_NAME_DEF_STMT (lhs);
if (defstmt
&& is_gimple_assign (defstmt)
imm_use_iterator iter;
FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
/* Only bother to record more equivalences for lhs that
can be directly used by e->dest.
static int
loop_depth_of_name (tree x)
{
- gimple defstmt;
+ gimple *defstmt;
basic_block defbb;
/* If it's not an SSA_NAME, we have no clue where the definition is. */
i_2 = i_1 +/- ... */
bool
-simple_iv_increment_p (gimple stmt)
+simple_iv_increment_p (gimple *stmt)
{
enum tree_code code;
tree lhs, preinc;
- gimple phi;
+ gimple *phi;
size_t i;
if (gimple_code (stmt) != GIMPLE_ASSIGN)
void
dom_opt_dom_walker::after_dom_children (basic_block bb)
{
- gimple last;
+ gimple *last;
/* If we have an outgoing edge to a block with multiple incoming and
outgoing edges, then we may be able to thread the edge, i.e., we
bool insert = true;
bool assigns_var_p = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) == GIMPLE_PHI)
def = gimple_phi_result (stmt);
lifting is done by eliminate_redundant_computations. */
static void
-record_equivalences_from_stmt (gimple stmt, int may_optimize_p,
+record_equivalences_from_stmt (gimple *stmt, int may_optimize_p,
class avail_exprs_stack *avail_exprs_stack)
{
tree lhs;
generate here may in fact be ill-formed, but it is simply
used as an internal device in this pass, and never becomes
part of the CFG. */
- gimple defstmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *defstmt = SSA_NAME_DEF_STMT (rhs);
new_stmt = gimple_build_assign (rhs, lhs);
SSA_NAME_DEF_STMT (rhs) = defstmt;
}
CONST_AND_COPIES. */
static void
-cprop_operand (gimple stmt, use_operand_p op_p)
+cprop_operand (gimple *stmt, use_operand_p op_p)
{
tree val;
tree op = USE_FROM_PTR (op_p);
number of iteration analysis. */
if (TREE_CODE (val) != INTEGER_CST)
{
- gimple def = SSA_NAME_DEF_STMT (op);
+ gimple *def = SSA_NAME_DEF_STMT (op);
if (gimple_code (def) == GIMPLE_PHI
&& gimple_bb (def)->loop_father->header == gimple_bb (def))
return;
vdef_ops of STMT. */
static void
-cprop_into_stmt (gimple stmt)
+cprop_into_stmt (gimple *stmt)
{
use_operand_p op_p;
ssa_op_iter iter;
class const_and_copies *const_and_copies,
class avail_exprs_stack *avail_exprs_stack)
{
- gimple stmt, old_stmt;
+ gimple *stmt, *old_stmt;
bool may_optimize_p;
bool modified_p = false;
bool was_noreturn;
/* Build a new statement with the RHS and LHS exchanged. */
if (TREE_CODE (rhs) == SSA_NAME)
{
- gimple defstmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *defstmt = SSA_NAME_DEF_STMT (rhs);
new_stmt = gimple_build_assign (rhs, lhs);
SSA_NAME_DEF_STMT (rhs) = defstmt;
}
we finish processing this block and its children. */
static tree
-lookup_avail_expr (gimple stmt, bool insert,
+lookup_avail_expr (gimple *stmt, bool insert,
class avail_exprs_stack *avail_exprs_stack)
{
expr_hash_elt **slot;
#ifndef GCC_TREE_SSA_DOM_H
#define GCC_TREE_SSA_DOM_H
-extern bool simple_iv_increment_p (gimple);
+extern bool simple_iv_increment_p (gimple *);
#endif /* GCC_TREE_SSA_DOM_H */
Return TRUE if the above conditions are met, otherwise FALSE. */
static bool
-dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
+dse_possible_dead_store_p (ao_ref *ref, gimple *stmt, gimple **use_stmt)
{
- gimple temp;
+ gimple *temp;
unsigned cnt = 0;
*use_stmt = NULL;
temp = stmt;
do
{
- gimple use_stmt, defvar_def;
+ gimple *use_stmt, *defvar_def;
imm_use_iterator ui;
bool fail = false;
tree defvar;
static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* If this statement has no virtual defs, then there is nothing
to do. */
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMSET:
{
- gimple use_stmt;
+ gimple *use_stmt;
ao_ref ref;
tree size = NULL_TREE;
if (gimple_call_num_args (stmt) == 3)
tree lhs = gimple_call_lhs (stmt);
if (lhs)
{
- gimple new_stmt = gimple_build_assign (lhs, ptr);
+ gimple *new_stmt = gimple_build_assign (lhs, ptr);
unlink_stmt_vdef (stmt);
if (gsi_replace (gsi, new_stmt, true))
bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
if (is_gimple_assign (stmt))
{
- gimple use_stmt;
+ gimple *use_stmt;
/* Self-assignments are zombies. */
if (operand_equal_p (gimple_assign_rhs1 (stmt),
/* Set to true if we delete dead edges during the optimization. */
static bool cfg_changed;
-static tree rhs_to_tree (tree type, gimple stmt);
+static tree rhs_to_tree (tree type, gimple *stmt);
static bitmap to_purge;
it is set to whether the chain to NAME is a single use chain
or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
-static gimple
+static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
bool single_use = true;
do {
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (!has_single_use (name))
{
propagation source. Returns true if so, otherwise false. */
static bool
-can_propagate_from (gimple def_stmt)
+can_propagate_from (gimple *def_stmt)
{
gcc_assert (is_gimple_assign (def_stmt));
remove_prop_source_from_use (tree name)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
bool cfg_changed = false;
do {
routines that deal with gimple exclusively. */
static tree
-rhs_to_tree (tree type, gimple stmt)
+rhs_to_tree (tree type, gimple *stmt)
{
location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
considered simplified. */
static tree
-combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
+combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
were no simplifying combines. */
static tree
-forward_propagate_into_comparison_1 (gimple stmt,
+forward_propagate_into_comparison_1 (gimple *stmt,
enum tree_code code, tree type,
tree op0, tree op1)
{
simplify comparisons against constants. */
if (TREE_CODE (op0) == SSA_NAME)
{
- gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
+ gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
if (def_stmt && can_propagate_from (def_stmt))
{
enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
/* If that wasn't successful, try the second operand. */
if (TREE_CODE (op1) == SSA_NAME)
{
- gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
+ gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
if (def_stmt && can_propagate_from (def_stmt))
{
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree tmp;
bool cfg_changed = false;
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
tree tmp = NULL_TREE;
tree cond = gimple_assign_rhs1 (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
{
enum tree_code def_code;
tree name = cond;
- gimple def_stmt = get_prop_source_stmt (name, true, NULL);
+ gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
if (!def_stmt || !can_propagate_from (def_stmt))
return 0;
relevant data structures to match. */
static void
-tidy_after_forward_propagate_addr (gimple stmt)
+tidy_after_forward_propagate_addr (gimple *stmt)
{
/* We may have turned a trapping insn into a non-trapping insn. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
bool single_use_p)
{
tree lhs, rhs, rhs2, array_ref;
- gimple use_stmt = gsi_stmt (*use_stmt_gsi);
+ gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
enum tree_code rhs_code;
bool res = true;
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
bool all = true;
bool single_use_p = parent_single_use_p && has_single_use (name);
tree cond = gimple_switch_index (stmt);
if (TREE_CODE (cond) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (cond);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
if (gimple_assign_cast_p (def_stmt))
{
tree def = gimple_assign_rhs1 (def_stmt);
{
tree p = i ? p1 : p2;
tree off = size_zero_node;
- gimple stmt;
+ gimple *stmt;
enum tree_code code;
/* For each of p1 and p2 we need to iterate at least
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
- gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
+ gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
tree vuse = gimple_vuse (stmt2);
if (vuse == NULL)
return false;
tree val2 = gimple_call_arg (stmt2, 1);
tree len2 = gimple_call_arg (stmt2, 2);
tree diff, vdef, new_str_cst;
- gimple use_stmt;
+ gimple *use_stmt;
unsigned int ptr1_align;
unsigned HOST_WIDE_INT src_len;
char *src_buf;
static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
- gimple def;
+ gimple *def;
enum tree_code code1;
tree arg11;
tree arg21;
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree arg[2], rtype, rotcnt = NULL_TREE;
tree def_arg1[2], def_arg2[2];
enum tree_code def_code[2];
tree lhs;
int i;
bool swapped_p = false;
- gimple g;
+ gimple *g;
arg[0] = gimple_assign_rhs1 (stmt);
arg[1] = gimple_assign_rhs2 (stmt);
static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op, op0, op1, op2;
tree elem_type;
unsigned idx, n, size;
static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op0, op1, op2, op3, arg0, arg1;
enum tree_code code;
bool single_use_op0 = false;
{
enum tree_code code2;
- gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
+ gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
if (!def_stmt2 || !can_propagate_from (def_stmt2))
return 0;
static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op, op2, orig, type, elem_type;
unsigned elem_size, nelts, i;
enum tree_code code;
lattice.quick_grow_cleared (num_ssa_names);
int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
int postorder_num = inverted_post_order_compute (postorder);
- auto_vec<gimple, 4> to_fixup;
+ auto_vec<gimple *, 4> to_fixup;
to_purge = BITMAP_ALLOC (NULL);
for (int i = 0; i < postorder_num; ++i)
{
Note we update GSI within the loop as necessary. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs, rhs;
enum tree_code code;
bool rewrite = true;
FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (!is_gimple_assign (use_stmt)
}
if (rewrite)
{
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
if (is_gimple_debug (use_stmt))
tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
TREE_TYPE (TREE_TYPE (rhs)),
unshare_expr (rhs));
- gimple new_stmt
+ gimple *new_stmt
= gimple_build_assign (gimple_assign_lhs (use_stmt),
new_rhs);
/* Rewrite stores of a single-use complex build expression
to component-wise stores. */
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (single_imm_use (lhs, &use_p, &use_stmt)
&& gimple_store_p (use_stmt)
&& !gimple_has_volatile_ops (use_stmt)
tree new_lhs = build1 (REALPART_EXPR,
TREE_TYPE (TREE_TYPE (use_lhs)),
unshare_expr (use_lhs));
- gimple new_stmt = gimple_build_assign (new_lhs, rhs);
+ gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
location_t loc = gimple_location (use_stmt);
gimple_set_location (new_stmt, loc);
gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
Note we update GSI within the loop as necessary. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
- gimple orig_stmt = stmt;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *orig_stmt = stmt;
bool changed = false;
bool was_noreturn = (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt));
fixup by visiting a dominating now noreturn call first. */
while (!to_fixup.is_empty ())
{
- gimple stmt = to_fixup.pop ();
+ gimple *stmt = to_fixup.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Fixing up noreturn call ");
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (TREE_CODE (candidate) == SSA_NAME
&& has_single_use (candidate))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
if (is_gimple_assign (def_stmt)
&& CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
{
static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
- gimple stmt;
+ gimple *stmt;
/* Get at the definition of the result of the bit test. */
if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
&& TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
{
- gimple tmp;
+ gimple *tmp;
/* Both arguments of the BIT_AND_EXPR can be the single-bit
specifying expression. */
static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
- gimple stmt;
+ gimple *stmt;
/* Get at the definition of the result of the bit test. */
if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
gimple_stmt_iterator gsi;
- gimple inner_stmt, outer_stmt;
+ gimple *inner_stmt, *outer_stmt;
gcond *inner_cond, *outer_cond;
tree name1, name2, bit1, bit2, bits1, bits2;
for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
{
basic_block bb = bbs[i];
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
ssa_op_iter i;
tree op;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_DEF)
{
unsigned i;
tree b;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
continue;
/* Walk the statements. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree b = gimple_block (stmt);
if (is_gimple_debug (stmt))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree b = gimple_block (stmt);
if (gimple_clobber_p (stmt))
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
int p;
- gimple stmt;
+ gimple *stmt;
use_operand_p use;
basic_block def_bb = NULL;
imm_use_iterator imm_iter;
add it to the list of live on entry blocks. */
FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
{
- gimple use_stmt = USE_STMT (use);
+ gimple *use_stmt = USE_STMT (use);
basic_block add_block = NULL;
if (gimple_code (use_stmt) == GIMPLE_PHI)
{
unsigned i;
tree var;
- gimple stmt;
+ gimple *stmt;
basic_block bb;
edge e;
int num;
int *limit)
{
gimple_stmt_iterator bsi;
- gimple last;
+ gimple *last;
/* Do not copy one block more than once (we do not really want to do
loop peeling here). */
static bool
do_while_loop_p (struct loop *loop)
{
- gimple stmt = last_stmt (loop->latch);
+ gimple *stmt = last_stmt (loop->latch);
/* If the latch of the loop is not empty, it is not a do-while loop. */
if (stmt
!gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) == GIMPLE_COND)
gimple_set_no_warning (stmt, true);
else if (is_gimple_assign (stmt))
unsigned cost; /* Cost of the computation performed by the
statement. */
- vec<gimple> depends; /* Vector of statements that must be also
+ vec<gimple *> depends; /* Vector of statements that must be also
hoisted out of the loop when this statement
is hoisted; i.e. those that define the
operands of the statement and are inside of
/* Maps statements to their lim_aux_data. */
-static hash_map<gimple, lim_aux_data *> *lim_aux_data_map;
+static hash_map<gimple *, lim_aux_data *> *lim_aux_data_map;
/* Description of a memory reference location. */
struct mem_ref_loc
{
tree *ref; /* The reference itself. */
- gimple stmt; /* The statement in that it occurs. */
+ gimple *stmt; /* The statement in which it occurs. */
};
#define MEM_ANALYZABLE(REF) ((REF)->id != UNANALYZABLE_MEM_ID)
static struct lim_aux_data *
-init_lim_data (gimple stmt)
+init_lim_data (gimple *stmt)
{
lim_aux_data *p = XCNEW (struct lim_aux_data);
lim_aux_data_map->put (stmt, p);
}
static struct lim_aux_data *
-get_lim_data (gimple stmt)
+get_lim_data (gimple *stmt)
{
lim_aux_data **p = lim_aux_data_map->get (stmt);
if (!p)
}
static void
-clear_lim_data (gimple stmt)
+clear_lim_data (gimple *stmt)
{
lim_aux_data **p = lim_aux_data_map->get (stmt);
if (!p)
Otherwise return MOVE_IMPOSSIBLE. */
enum move_pos
-movement_possibility (gimple stmt)
+movement_possibility (gimple *stmt)
{
tree lhs;
enum move_pos ret = MOVE_POSSIBLE;
static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
- gimple def_stmt;
+ gimple *def_stmt;
basic_block def_bb;
struct loop *max_loop;
struct lim_aux_data *lim_data;
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
bool add_cost)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (def);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (def);
basic_block def_bb = gimple_bb (def_stmt);
struct loop *max_loop;
struct lim_aux_data *def_data;
are just ad-hoc constants, similar to costs for inlining. */
static unsigned
-stmt_cost (gimple stmt)
+stmt_cost (gimple *stmt)
{
/* Always try to create possibilities for unswitching. */
if (gimple_code (stmt) == GIMPLE_COND
it is a store or load. Otherwise, returns NULL. */
static tree *
-simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
+simple_mem_ref_in_stmt (gimple *stmt, bool *is_store)
{
tree *lhs, *rhs;
/* Returns the memory reference contained in STMT. */
static im_mem_ref *
-mem_ref_in_stmt (gimple stmt)
+mem_ref_in_stmt (gimple *stmt)
{
bool store;
tree *mem = simple_mem_ref_in_stmt (stmt, &store);
is defined in, and true otherwise. */
static bool
-determine_max_movement (gimple stmt, bool must_preserve_exec)
+determine_max_movement (gimple *stmt, bool must_preserve_exec)
{
basic_block bb = gimple_bb (stmt);
struct loop *loop = bb->loop_father;
if (!add_dependency (val, lim_data, loop, false))
return false;
- gimple def_stmt = SSA_NAME_DEF_STMT (val);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (val);
if (gimple_bb (def_stmt)
&& gimple_bb (def_stmt)->loop_father == loop)
{
if (gimple_phi_num_args (phi) > 1)
{
basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
- gimple cond;
+ gimple *cond;
if (gsi_end_p (gsi_last_bb (dom)))
return false;
cond = gsi_stmt (gsi_last_bb (dom));
operands) is hoisted at least out of the loop LEVEL. */
static void
-set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
+set_level (gimple *stmt, struct loop *orig_loop, struct loop *level)
{
struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
struct lim_aux_data *lim_data;
- gimple dep_stmt;
+ gimple *dep_stmt;
unsigned i;
stmt_loop = find_common_loop (orig_loop, stmt_loop);
information to set it more sanely. */
static void
-set_profitable_level (gimple stmt)
+set_profitable_level (gimple *stmt)
{
set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}
/* Returns true if STMT is a call that has side effects. */
static bool
-nonpure_call_p (gimple stmt)
+nonpure_call_p (gimple *stmt)
{
if (gimple_code (stmt) != GIMPLE_CALL)
return false;
/* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
-static gimple
+static gimple *
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
gassign *stmt, *stmt1, *stmt2;
/* Check if the pattern at *BSI is a bittest of the form
(A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
-static gimple
+static gimple *
rewrite_bittest (gimple_stmt_iterator *bsi)
{
gassign *stmt;
- gimple stmt1;
+ gimple *stmt1;
gassign *stmt2;
- gimple use_stmt;
+ gimple *use_stmt;
gcond *cond_stmt;
tree lhs, name, t, a, b;
use_operand_p use;
{
enum move_pos pos;
gimple_stmt_iterator bsi;
- gimple stmt;
+ gimple *stmt;
bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
struct lim_aux_data *lim_data;
else
{
basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
- gimple cond = gsi_stmt (gsi_last_bb (dom));
+ gimple *cond = gsi_stmt (gsi_last_bb (dom));
tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
/* Get the PHI arguments corresponding to the true and false
edges of COND. */
{
edge e;
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
lim_data = get_lim_data (stmt);
if (lim_data == NULL)
static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
- gimple stmt;
+ gimple *stmt;
if (!op
|| is_gimple_min_invariant (op))
description REF. The reference occurs in statement STMT. */
static void
-record_mem_ref_loc (im_mem_ref *ref, gimple stmt, tree *loc)
+record_mem_ref_loc (im_mem_ref *ref, gimple *stmt, tree *loc)
{
mem_ref_loc aref;
aref.stmt = stmt;
well. */
static void
-gather_mem_refs_stmt (struct loop *loop, gimple stmt)
+gather_mem_refs_stmt (struct loop *loop, gimple *stmt)
{
tree *mem = NULL;
hashval_t hash;
bool loop_has_only_one_exit;
edge then_old_edge, orig_ex = ex;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
struct prev_flag_edges *prev_edges = (struct prev_flag_edges *) ex->aux;
bool irr = ex->flags & EDGE_IRREDUCIBLE_LOOP;
&& gimple_assign_lhs_ptr (loc->stmt) == loc->ref)
{
gimple_stmt_iterator gsi = gsi_for_stmt (loc->stmt);
- gimple stmt = gimple_build_assign (flag, boolean_true_node);
+ gimple *stmt = gimple_build_assign (flag, boolean_true_node);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
}
return false;
bitmap_obstack_initialize (&lim_bitmap_obstack);
gcc_obstack_init (&mem_ref_obstack);
- lim_aux_data_map = new hash_map<gimple, lim_aux_data *>;
+ lim_aux_data_map = new hash_map<gimple *, lim_aux_data *>;
if (flag_tm)
compute_transaction_bits ();
/* Return true if OP in STMT will be constant after peeling LOOP. */
static bool
-constant_after_peeling (tree op, gimple stmt, struct loop *loop)
+constant_after_peeling (tree op, gimple *stmt, struct loop *loop)
{
affine_iv iv;
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
int num = estimate_num_insns (stmt, &eni_size_weights);
bool likely_eliminated = false;
bool likely_eliminated_last = false;
basic_block bb = path.pop ();
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_CALL)
{
int flags = gimple_call_flags (stmt);
propagate_into_all_uses (tree ssa_name, tree val)
{
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, ssa_name)
{
/* Look for assignments to SSA names with constant RHS. */
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
if (is_gimple_assign (stmt)
unsigned sub_id; /* The id of the sub use. */
enum use_type type; /* Type of the use. */
struct iv *iv; /* The induction variable it is based on. */
- gimple stmt; /* Statement in that it occurs. */
+ gimple *stmt; /* Statement in which it occurs. */
tree *op_p; /* The place where it occurs. */
bitmap related_cands; /* The set of "related" iv candidates, plus the common
important ones. */
bool important; /* Whether this is an "important" candidate, i.e. such
that it should be considered by all uses. */
ENUM_BITFIELD(iv_position) pos : 8; /* Where it is computed. */
- gimple incremented_at;/* For original biv, the statement where it is
+ gimple *incremented_at; /* For original biv, the statement where it is
incremented. */
tree var_before; /* The variable used for it before increment. */
tree var_after; /* The variable used for it after increment. */
emitted in LOOP. */
static bool
-stmt_after_ip_normal_pos (struct loop *loop, gimple stmt)
+stmt_after_ip_normal_pos (struct loop *loop, gimple *stmt)
{
basic_block bb = ip_normal_pos (loop), sbb = gimple_bb (stmt);
if the positions are identical. */
static bool
-stmt_after_inc_pos (struct iv_cand *cand, gimple stmt, bool true_if_equal)
+stmt_after_inc_pos (struct iv_cand *cand, gimple *stmt, bool true_if_equal)
{
basic_block cand_bb = gimple_bb (cand->incremented_at);
basic_block stmt_bb = gimple_bb (stmt);
CAND is incremented in LOOP. */
static bool
-stmt_after_increment (struct loop *loop, struct iv_cand *cand, gimple stmt)
+stmt_after_increment (struct loop *loop, struct iv_cand *cand, gimple *stmt)
{
switch (cand->pos)
{
mark_bivs (struct ivopts_data *data)
{
gphi *phi;
- gimple def;
+ gimple *def;
tree var;
struct iv *iv, *incr_iv;
struct loop *loop = data->current_loop;
parameters to IV. */
static bool
-find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
+find_givs_in_stmt_scev (struct ivopts_data *data, gimple *stmt, affine_iv *iv)
{
tree lhs, stop;
struct loop *loop = data->current_loop;
/* Finds general ivs in statement STMT. */
static void
-find_givs_in_stmt (struct ivopts_data *data, gimple stmt)
+find_givs_in_stmt (struct ivopts_data *data, gimple *stmt)
{
affine_iv iv;
static struct iv_use *
record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
- gimple stmt, enum use_type use_type, tree addr_base = NULL,
+ gimple *stmt, enum use_type use_type, tree addr_base = NULL,
unsigned HOST_WIDE_INT addr_offset = 0)
{
struct iv_use *use = XCNEW (struct iv_use);
static struct iv_use *
record_sub_use (struct ivopts_data *data, tree *use_p,
- struct iv *iv, gimple stmt, enum use_type use_type,
+ struct iv *iv, gimple *stmt, enum use_type use_type,
tree addr_base, unsigned HOST_WIDE_INT addr_offset,
unsigned int id_group)
{
find_interesting_uses_op (struct ivopts_data *data, tree op)
{
struct iv *iv;
- gimple stmt;
+ gimple *stmt;
struct iv_use *use;
if (TREE_CODE (op) != SSA_NAME)
condition and false is returned. */
static bool
-extract_cond_operands (struct ivopts_data *data, gimple stmt,
+extract_cond_operands (struct ivopts_data *data, gimple *stmt,
tree **control_var, tree **bound,
struct iv **iv_var, struct iv **iv_bound)
{
records it. */
static void
-find_interesting_uses_cond (struct ivopts_data *data, gimple stmt)
+find_interesting_uses_cond (struct ivopts_data *data, gimple *stmt)
{
tree *var_p, *bound_p;
struct iv *var_iv;
unsigned i, n;
tree e2, e1;
enum tree_code code;
- gimple stmt;
+ gimple *stmt;
if (expr == NULL_TREE)
return NULL;
struct ifs_ivopts_data
{
struct ivopts_data *ivopts_data;
- gimple stmt;
+ gimple *stmt;
tree step;
};
static struct iv_use *
record_group_use (struct ivopts_data *data, tree *use_p,
- struct iv *iv, gimple stmt, enum use_type use_type)
+ struct iv *iv, gimple *stmt, enum use_type use_type)
{
unsigned int i;
struct iv_use *use;
/* Finds addresses in *OP_P inside STMT. */
static void
-find_interesting_uses_address (struct ivopts_data *data, gimple stmt, tree *op_p)
+find_interesting_uses_address (struct ivopts_data *data, gimple *stmt,
+ tree *op_p)
{
tree base = *op_p, step = size_zero_node;
struct iv *civ;
/* Finds and records invariants used in STMT. */
static void
-find_invariants_stmt (struct ivopts_data *data, gimple stmt)
+find_invariants_stmt (struct ivopts_data *data, gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use_p;
/* Finds interesting uses of induction variables in the statement STMT. */
static void
-find_interesting_uses_stmt (struct ivopts_data *data, gimple stmt)
+find_interesting_uses_stmt (struct ivopts_data *data, gimple *stmt)
{
struct iv *iv;
tree op, *lhs, *rhs;
static struct iv_cand *
add_candidate_1 (struct ivopts_data *data,
tree base, tree step, bool important, enum iv_position pos,
- struct iv_use *use, gimple incremented_at,
+ struct iv_use *use, gimple *incremented_at,
struct iv *orig_iv = NULL)
{
unsigned i;
static void
add_iv_candidate_for_biv (struct ivopts_data *data, struct iv *iv)
{
- gimple phi;
+ gimple *phi;
tree def;
struct iv_cand *cand;
/* Returns variable containing the value of candidate CAND at statement AT. */
static tree
-var_at_stmt (struct loop *loop, struct iv_cand *cand, gimple stmt)
+var_at_stmt (struct loop *loop, struct iv_cand *cand, gimple *stmt)
{
if (stmt_after_increment (loop, cand, stmt))
return cand->var_after;
static bool
get_computation_aff (struct loop *loop,
- struct iv_use *use, struct iv_cand *cand, gimple at,
+ struct iv_use *use, struct iv_cand *cand, gimple *at,
struct aff_tree *aff)
{
tree ubase = use->iv->base;
static tree
get_computation_at (struct loop *loop,
- struct iv_use *use, struct iv_cand *cand, gimple at)
+ struct iv_use *use, struct iv_cand *cand, gimple *at)
{
aff_tree aff;
tree type = get_use_type (use);
static comp_cost
get_computation_cost_at (struct ivopts_data *data,
struct iv_use *use, struct iv_cand *cand,
- bool address_p, bitmap *depends_on, gimple at,
+ bool address_p, bitmap *depends_on, gimple *at,
bool *can_autoinc,
int *inv_expr_id)
{
stores it to VAL. */
static void
-cand_value_at (struct loop *loop, struct iv_cand *cand, gimple at, tree niter,
+cand_value_at (struct loop *loop, struct iv_cand *cand, gimple *at, tree niter,
aff_tree *val)
{
aff_tree step, delta, nit;
if (TREE_CODE (base) == SSA_NAME)
{
- gimple stmt = SSA_NAME_DEF_STMT (base);
+ gimple *stmt = SSA_NAME_DEF_STMT (base);
if (gimple_code (stmt) != GIMPLE_ASSIGN)
return false;
adjust_iv_update_pos (struct iv_cand *cand, struct iv_use *use)
{
tree var_after;
- gimple iv_update, stmt;
+ gimple *iv_update, *stmt;
basic_block bb;
gimple_stmt_iterator gsi, gsi_iv;
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
int count = 0;
FOR_EACH_IMM_USE_STMT (stmt, imm_iter, def)
for (i = 0; i < num_nodes; i++)
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt)
&& !is_inexpensive_builtin (gimple_call_fndecl (stmt)))
return true;
#ifdef ENABLE_CHECKING
/* Check that at least one of the edges entering the EXIT block exits
the loop, or a superloop of that loop, that VAR is defined in. */
- gimple def_stmt = SSA_NAME_DEF_STMT (var);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (var);
basic_block def_bb = gimple_bb (def_stmt);
FOR_EACH_EDGE (e, ei, exit->preds)
{
names are used to USE_BLOCKS, and the ssa names themselves to NEED_PHIS. */
static void
-find_uses_to_rename_stmt (gimple stmt, bitmap *use_blocks, bitmap need_phis,
+find_uses_to_rename_stmt (gimple *stmt, bitmap *use_blocks, bitmap need_phis,
int use_flags)
{
ssa_op_iter iter;
static void
find_uses_to_rename_def (tree def, bitmap *use_blocks, bitmap need_phis)
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator imm_iter;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
/* FOR_EACH_SSA_TREE_OPERAND iterator does not allow
SSA_OP_VIRTUAL_DEFS only. */
if (def_flags == SSA_OP_VIRTUAL_DEFS)
static void
check_loop_closed_ssa_use (basic_block bb, tree use)
{
- gimple def;
+ gimple *def;
basic_block def_bb;
if (TREE_CODE (use) != SSA_NAME || virtual_operand_p (use))
/* Checks invariants of loop closed ssa form in statement STMT in BB. */
static void
-check_loop_closed_ssa_stmt (basic_block bb, gimple stmt)
+check_loop_closed_ssa_stmt (basic_block bb, gimple *stmt)
{
ssa_op_iter iter;
tree var;
basic_block
ip_normal_pos (struct loop *loop)
{
- gimple last;
+ gimple *last;
basic_block bb;
edge exit;
bool *insert_after)
{
basic_block bb = ip_normal_pos (loop), latch = ip_end_pos (loop);
- gimple last = last_stmt (latch);
+ gimple *last = last_stmt (latch);
if (!bb
|| (last && gimple_code (last) != GIMPLE_LABEL))
{
edge e;
tree c0, c1;
- gimple cond;
+ gimple *cond;
enum tree_code cmp;
if (!single_pred_p (bb))
edge e;
basic_block bb;
tree c0, c1;
- gimple cond;
+ gimple *cond;
enum tree_code cmp;
/* Get rid of unnecessary casts, but preserve the value of
unsigned i, n;
tree ret = NULL_TREE, e, ee, e1;
enum tree_code code;
- gimple stmt;
+ gimple *stmt;
if (expr == NULL_TREE)
return expr;
{
edge e;
basic_block bb;
- gimple stmt;
+ gimple *stmt;
tree cond;
int cnt = 0;
basic_block *body;
gimple_stmt_iterator bsi;
unsigned i;
- gimple call;
+ gimple *call;
if (exit != single_exit (loop))
return false;
struct tree_niter_desc *niter,
bool warn, bool every_iteration)
{
- gimple last;
+ gimple *last;
gcond *stmt;
tree type;
tree op0, op1;
static gphi *
chain_of_csts_start (struct loop *loop, tree x)
{
- gimple stmt = SSA_NAME_DEF_STMT (x);
+ gimple *stmt = SSA_NAME_DEF_STMT (x);
tree use;
basic_block bb = gimple_bb (stmt);
enum tree_code code;
static tree
get_val_for (tree x, tree base)
{
- gimple stmt;
+ gimple *stmt;
gcc_checking_assert (is_gimple_min_invariant (base));
tree acnd;
tree op[2], val[2], next[2], aval[2];
gphi *phi;
- gimple cond;
+ gimple *cond;
unsigned i, j;
enum tree_code cmp;
an assignment statement STMT. */
static widest_int
-derive_constant_upper_bound_assign (gimple stmt)
+derive_constant_upper_bound_assign (gimple *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
tree op0 = gimple_assign_rhs1 (stmt);
{
tree subtype, maxt;
widest_int bnd, max, mmax, cst;
- gimple stmt;
+ gimple *stmt;
if (INTEGRAL_TYPE_P (type))
maxt = TYPE_MAX_VALUE (type);
static void
do_warn_aggressive_loop_optimizations (struct loop *loop,
- widest_int i_bound, gimple stmt)
+ widest_int i_bound, gimple *stmt)
{
/* Don't warn if the loop doesn't have known constant bound. */
if (!loop->nb_iterations
if (e == NULL)
return;
- gimple estmt = last_stmt (e->src);
+ gimple *estmt = last_stmt (e->src);
if (warning_at (gimple_location (stmt), OPT_Waggressive_loop_optimizations,
"iteration %E invokes undefined behavior",
wide_int_to_tree (TREE_TYPE (loop->nb_iterations),
static void
record_estimate (struct loop *loop, tree bound, const widest_int &i_bound,
- gimple at_stmt, bool is_exit, bool realistic, bool upper)
+ gimple *at_stmt, bool is_exit, bool realistic, bool upper)
{
widest_int delta;
UPPER is true if we are sure the induction variable does not wrap. */
static void
-record_nonwrapping_iv (struct loop *loop, tree base, tree step, gimple stmt,
+record_nonwrapping_iv (struct loop *loop, tree base, tree step, gimple *stmt,
tree low, tree high, bool realistic, bool upper)
{
tree niter_bound, extreme, delta;
struct ilb_data
{
struct loop *loop;
- gimple stmt;
+ gimple *stmt;
};
static bool
STMT is guaranteed to be executed in every iteration of LOOP. */
static void
-infer_loop_bounds_from_ref (struct loop *loop, gimple stmt, tree ref)
+infer_loop_bounds_from_ref (struct loop *loop, gimple *stmt, tree ref)
{
struct ilb_data data;
executed in every iteration of LOOP. */
static void
-infer_loop_bounds_from_array (struct loop *loop, gimple stmt)
+infer_loop_bounds_from_array (struct loop *loop, gimple *stmt)
{
if (is_gimple_assign (stmt))
{
that pointer arithmetics in STMT does not overflow. */
static void
-infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple stmt)
+infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple *stmt)
{
tree def, base, step, scev, type, low, high;
tree var, ptr;
that signed arithmetics in STMT does not overflow. */
static void
-infer_loop_bounds_from_signedness (struct loop *loop, gimple stmt)
+infer_loop_bounds_from_signedness (struct loop *loop, gimple *stmt)
{
tree def, base, step, scev, type, low, high;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
infer_loop_bounds_from_array (loop, stmt);
static void
maybe_lower_iteration_bound (struct loop *loop)
{
- hash_set<gimple> *not_executed_last_iteration = NULL;
+ hash_set<gimple *> *not_executed_last_iteration = NULL;
struct nb_iter_bound *elt;
bool found_exit = false;
vec<basic_block> queue = vNULL;
&& wi::ltu_p (elt->bound, loop->nb_iterations_upper_bound))
{
if (!not_executed_last_iteration)
- not_executed_last_iteration = new hash_set<gimple>;
+ not_executed_last_iteration = new hash_set<gimple *>;
not_executed_last_iteration->add (elt->stmt);
}
}
/* Loop for possible exits and statements bounding the execution. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (not_executed_last_iteration->contains (stmt))
{
stmt_found = true;
/* Returns true if statement S1 dominates statement S2. */
bool
-stmt_dominates_stmt_p (gimple s1, gimple s2)
+stmt_dominates_stmt_p (gimple *s1, gimple *s2)
{
basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
bounds computed by discover_iteration_bound_by_body_walk. */
static bool
-n_of_executions_at_most (gimple stmt,
+n_of_executions_at_most (gimple *stmt,
struct nb_iter_bound *niter_bound,
tree niter)
{
static bool
loop_exits_before_overflow (tree base, tree step,
- gimple at_stmt, struct loop *loop)
+ gimple *at_stmt, struct loop *loop)
{
widest_int niter;
struct control_iv *civ;
bool
scev_probably_wraps_p (tree base, tree step,
- gimple at_stmt, struct loop *loop,
+ gimple *at_stmt, struct loop *loop,
bool use_overflow_semantics)
{
/* FIXME: We really need something like
extern bool max_stmt_executions (struct loop *, widest_int *);
extern bool estimated_stmt_executions (struct loop *, widest_int *);
extern void estimate_numbers_of_iterations (void);
-extern bool stmt_dominates_stmt_p (gimple, gimple);
+extern bool stmt_dominates_stmt_p (gimple *, gimple *);
extern bool nowrap_type_p (tree);
-extern bool scev_probably_wraps_p (tree, tree, gimple, struct loop *, bool);
+extern bool scev_probably_wraps_p (tree, tree, gimple *, struct loop *, bool);
extern void free_loop_control_ivs (struct loop *);
extern void free_numbers_of_iterations_estimates_loop (struct loop *);
extern void free_numbers_of_iterations_estimates (void);
struct mem_ref
{
- gimple stmt; /* Statement in that the reference appears. */
+ gimple *stmt; /* Statement in which the reference appears. */
tree mem; /* The reference. */
HOST_WIDE_INT delta; /* Constant offset of the reference. */
struct mem_ref_group *group; /* The group of references it belongs to. */
WRITE_P. The reference occurs in statement STMT. */
static void
-record_ref (struct mem_ref_group *group, gimple stmt, tree mem,
+record_ref (struct mem_ref_group *group, gimple *stmt, tree mem,
HOST_WIDE_INT delta, bool write_p)
{
struct mem_ref **aref;
struct ar_data
{
struct loop *loop; /* Loop of the reference. */
- gimple stmt; /* Statement of the reference. */
+ gimple *stmt; /* Statement of the reference. */
tree *step; /* Step of the memory reference. */
HOST_WIDE_INT *delta; /* Offset of the memory reference. */
};
static bool
analyze_ref (struct loop *loop, tree *ref_p, tree *base,
tree *step, HOST_WIDE_INT *delta,
- gimple stmt)
+ gimple *stmt)
{
struct ar_data ar_data;
tree off;
static bool
gather_memory_references_ref (struct loop *loop, struct mem_ref_group **refs,
- tree ref, bool write_p, gimple stmt)
+ tree ref, bool write_p, gimple *stmt)
{
tree base, step;
HOST_WIDE_INT delta;
basic_block bb;
unsigned i;
gimple_stmt_iterator bsi;
- gimple stmt;
+ gimple *stmt;
tree lhs, rhs;
struct mem_ref_group *refs = NULL;
static tree
tree_may_unswitch_on (basic_block bb, struct loop *loop)
{
- gimple last, def;
+ gimple *last, *def;
gcond *stmt;
tree cond, use;
basic_block def_bb;
simplify_using_entry_checks (struct loop *loop, tree cond)
{
edge e = loop_preheader_edge (loop);
- gimple stmt;
+ gimple *stmt;
while (1)
{
struct loop *nloop;
unsigned i, found;
tree cond = NULL_TREE;
- gimple stmt;
+ gimple *stmt;
bool changed = false;
i = 0;
if (EDGE_COUNT (b->succs) == 2)
{
- gimple stmt = last_stmt (b);
+ gimple *stmt = last_stmt (b);
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
{
/* Returns the loop of the statement STMT. */
static inline struct loop *
-loop_containing_stmt (gimple stmt)
+loop_containing_stmt (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
if (!bb)
/* If non-NULL, the GIMPLE_ASSIGN for a reciprocal computation that
was inserted in BB. */
- gimple recip_def_stmt;
+ gimple *recip_def_stmt;
/* Pointer to a list of "struct occurrence"s for blocks dominated
by BB. */
/* Return whether USE_STMT is a floating-point division by DEF. */
static inline bool
-is_division_by (gimple use_stmt, tree def)
+is_division_by (gimple *use_stmt, tree def)
{
return is_gimple_assign (use_stmt)
&& gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
static inline void
replace_reciprocal (use_operand_p use_p)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
basic_block bb = gimple_bb (use_stmt);
struct occurrence *occ = (struct occurrence *) bb->aux;
FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_division_by (use_stmt, def))
{
register_division_in (gimple_bb (use_stmt));
threshold = targetm.min_divisions_for_recip_mul (TYPE_MODE (TREE_TYPE (def)));
if (count >= threshold)
{
- gimple use_stmt;
+ gimple *use_stmt;
for (occ = occ_head; occ; occ = occ->next)
{
compute_merit (occ);
for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_has_lhs (stmt)
&& (def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF)) != NULL
for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
if (is_gimple_assign (stmt)
&& gimple_assign_rhs_code (stmt) == RDIV_EXPR)
{
tree arg1 = gimple_assign_rhs2 (stmt);
- gimple stmt1;
+ gimple *stmt1;
if (TREE_CODE (arg1) != SSA_NAME)
continue;
fail = false;
FOR_EACH_IMM_USE_FAST (use_p, ui, arg1)
{
- gimple stmt2 = USE_STMT (use_p);
+ gimple *stmt2 = USE_STMT (use_p);
if (is_gimple_debug (stmt2))
continue;
if (!is_gimple_assign (stmt2)
statements in the vector. */
static bool
-maybe_record_sincos (vec<gimple> *stmts,
- basic_block *top_bb, gimple use_stmt)
+maybe_record_sincos (vec<gimple *> *stmts,
+ basic_block *top_bb, gimple *use_stmt)
{
basic_block use_bb = gimple_bb (use_stmt);
if (*top_bb
gimple_stmt_iterator gsi;
imm_use_iterator use_iter;
tree fndecl, res, type;
- gimple def_stmt, use_stmt, stmt;
+ gimple *def_stmt, *use_stmt, *stmt;
int seen_cos = 0, seen_sin = 0, seen_cexpi = 0;
- auto_vec<gimple> stmts;
+ auto_vec<gimple *> stmts;
basic_block top_bb = NULL;
int i;
bool cfg_changed = false;
const char *name, enum tree_code code, tree arg0)
{
tree result = make_temp_ssa_name (type, NULL, name);
- gimple stmt = gimple_build_assign (result, build1 (code, type, arg0));
+ gimple *stmt = gimple_build_assign (result, build1 (code, type, arg0));
gimple_set_location (stmt, loc);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
return result;
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
/* Only the last stmt in a bb could throw, no need to call
statement STMT. */
static inline bool
-verify_symbolic_number_p (struct symbolic_number *n, gimple stmt)
+verify_symbolic_number_p (struct symbolic_number *n, gimple *stmt)
{
tree lhs_type;
accessed and the offset of the access from that base are recorded in N. */
bool
-find_bswap_or_nop_load (gimple stmt, tree ref, struct symbolic_number *n)
+find_bswap_or_nop_load (gimple *stmt, tree ref, struct symbolic_number *n)
{
/* Leaf node is an array or component ref. Memorize its base and
offset from base to compare to other such leaf nodes. */
symbolic number N1 and N2 whose source statements are respectively
SOURCE_STMT1 and SOURCE_STMT2. */
-static gimple
-perform_symbolic_merge (gimple source_stmt1, struct symbolic_number *n1,
- gimple source_stmt2, struct symbolic_number *n2,
+static gimple *
+perform_symbolic_merge (gimple *source_stmt1, struct symbolic_number *n1,
+ gimple *source_stmt2, struct symbolic_number *n2,
struct symbolic_number *n)
{
int i, size;
uint64_t mask;
- gimple source_stmt;
+ gimple *source_stmt;
struct symbolic_number *n_start;
/* Sources are different, cancel bswap if they are not memory location with
rhs's first tree is the expression of the source operand and NULL
otherwise. */
-static gimple
-find_bswap_or_nop_1 (gimple stmt, struct symbolic_number *n, int limit)
+static gimple *
+find_bswap_or_nop_1 (gimple *stmt, struct symbolic_number *n, int limit)
{
enum tree_code code;
tree rhs1, rhs2 = NULL;
- gimple rhs1_stmt, rhs2_stmt, source_stmt1;
+ gimple *rhs1_stmt, *rhs2_stmt, *source_stmt1;
enum gimple_rhs_class rhs_class;
if (!limit || !is_gimple_assign (stmt))
if (rhs_class == GIMPLE_BINARY_RHS)
{
struct symbolic_number n1, n2;
- gimple source_stmt, source_stmt2;
+ gimple *source_stmt, *source_stmt2;
if (code != BIT_IOR_EXPR)
return NULL;
function returns a stmt whose rhs's first tree is the source
expression. */
-static gimple
-find_bswap_or_nop (gimple stmt, struct symbolic_number *n, bool *bswap)
+static gimple *
+find_bswap_or_nop (gimple *stmt, struct symbolic_number *n, bool *bswap)
{
/* The number which the find_bswap_or_nop_1 result should match in order
to have a full byte swap. The number is shifted to the right
uint64_t cmpxchg = CMPXCHG;
uint64_t cmpnop = CMPNOP;
- gimple source_stmt;
+ gimple *source_stmt;
int limit;
/* The last parameter determines the search depth limit. It usually
changing of basic block. */
static bool
-bswap_replace (gimple cur_stmt, gimple src_stmt, tree fndecl, tree bswap_type,
- tree load_type, struct symbolic_number *n, bool bswap)
+bswap_replace (gimple *cur_stmt, gimple *src_stmt, tree fndecl,
+ tree bswap_type, tree load_type, struct symbolic_number *n,
+ bool bswap)
{
gimple_stmt_iterator gsi;
tree src, tmp, tgt;
- gimple bswap_stmt;
+ gimple *bswap_stmt;
gsi = gsi_for_stmt (cur_stmt);
src = gimple_assign_rhs1 (src_stmt);
gimple_stmt_iterator gsi_ins = gsi_for_stmt (src_stmt);
tree addr_expr, addr_tmp, val_expr, val_tmp;
tree load_offset_ptr, aligned_load_type;
- gimple addr_stmt, load_stmt;
+ gimple *addr_stmt, *load_stmt;
unsigned align;
HOST_WIDE_INT load_offset = 0;
/* Convert the src expression if necessary. */
if (!useless_type_conversion_p (TREE_TYPE (tmp), bswap_type))
{
- gimple convert_stmt;
+ gimple *convert_stmt;
tmp = make_temp_ssa_name (bswap_type, NULL, "bswapsrc");
convert_stmt = gimple_build_assign (tmp, NOP_EXPR, src);
/* Convert the result if necessary. */
if (!useless_type_conversion_p (TREE_TYPE (tgt), bswap_type))
{
- gimple convert_stmt;
+ gimple *convert_stmt;
tmp = make_temp_ssa_name (bswap_type, NULL, "bswapdst");
convert_stmt = gimple_build_assign (tgt, NOP_EXPR, tmp);
variant wouldn't be detected. */
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
{
- gimple src_stmt, cur_stmt = gsi_stmt (gsi);
+ gimple *src_stmt, *cur_stmt = gsi_stmt (gsi);
tree fndecl = NULL_TREE, bswap_type = NULL_TREE, load_type;
enum tree_code code;
struct symbolic_number n;
/* Return true if stmt is a type conversion operation that can be stripped
when used in a widening multiply operation. */
static bool
-widening_mult_conversion_strippable_p (tree result_type, gimple stmt)
+widening_mult_conversion_strippable_p (tree result_type, gimple *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
is_widening_mult_rhs_p (tree type, tree rhs, tree *type_out,
tree *new_rhs_out)
{
- gimple stmt;
+ gimple *stmt;
tree type1, rhs1;
if (TREE_CODE (rhs) == SSA_NAME)
and *TYPE2_OUT would give the operands of the multiplication. */
static bool
-is_widening_mult_p (gimple stmt,
+is_widening_mult_p (gimple *stmt,
tree *type1_out, tree *rhs1_out,
tree *type2_out, tree *rhs2_out)
{
value is true iff we converted the statement. */
static bool
-convert_mult_to_widen (gimple stmt, gimple_stmt_iterator *gsi)
+convert_mult_to_widen (gimple *stmt, gimple_stmt_iterator *gsi)
{
tree lhs, rhs1, rhs2, type, type1, type2;
enum insn_code handler;
is true iff we converted the statement. */
static bool
-convert_plusminus_to_widen (gimple_stmt_iterator *gsi, gimple stmt,
+convert_plusminus_to_widen (gimple_stmt_iterator *gsi, gimple *stmt,
enum tree_code code)
{
- gimple rhs1_stmt = NULL, rhs2_stmt = NULL;
- gimple conv1_stmt = NULL, conv2_stmt = NULL, conv_stmt;
+ gimple *rhs1_stmt = NULL, *rhs2_stmt = NULL;
+ gimple *conv1_stmt = NULL, *conv2_stmt = NULL, *conv_stmt;
tree type, type1, type2, optype;
tree lhs, rhs1, rhs2, mult_rhs1, mult_rhs2, add_rhs;
enum tree_code rhs1_code = ERROR_MARK, rhs2_code = ERROR_MARK;
operations. Returns true if successful and MUL_STMT should be removed. */
static bool
-convert_mult_to_fma (gimple mul_stmt, tree op1, tree op2)
+convert_mult_to_fma (gimple *mul_stmt, tree op1, tree op2)
{
tree mul_result = gimple_get_lhs (mul_stmt);
tree type = TREE_TYPE (mul_result);
- gimple use_stmt, neguse_stmt;
+ gimple *use_stmt, *neguse_stmt;
gassign *fma_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
if (TREE_CODE (rhs2) == SSA_NAME)
{
- gimple stmt2 = SSA_NAME_DEF_STMT (rhs2);
+ gimple *stmt2 = SSA_NAME_DEF_STMT (rhs2);
if (has_single_use (rhs2)
&& is_gimple_assign (stmt2)
&& gimple_assign_rhs_code (stmt2) == MULT_EXPR)
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
enum tree_code code;
if (is_gimple_assign (stmt))
compilations of multiple functions. */
static bitmap_obstack operands_bitmap_obstack;
-static void get_expr_operands (struct function *, gimple, tree *, int);
+static void get_expr_operands (struct function *, gimple *, tree *, int);
/* Number of functions with initialized ssa_operands. */
static int n_initialized = 0;
/* Adds OP to the list of uses of statement STMT after LAST. */
static inline use_optype_p
-add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
+add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
{
use_optype_p new_use;
TODO -- Make build_defs vec of tree *. */
static inline void
-finalize_ssa_defs (struct function *fn, gimple stmt)
+finalize_ssa_defs (struct function *fn, gimple *stmt)
{
/* Pre-pend the vdef we may have built. */
if (build_vdef != NULL_TREE)
TODO -- Make build_uses vec of tree *. */
static inline void
-finalize_ssa_uses (struct function *fn, gimple stmt)
+finalize_ssa_uses (struct function *fn, gimple *stmt)
{
unsigned new_i;
struct use_optype_d new_list;
/* Finalize all the build vectors, fill the new ones into INFO. */
static inline void
-finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
+finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
finalize_ssa_defs (fn, stmt);
finalize_ssa_uses (fn, stmt);
static void
add_virtual_operand (struct function *fn,
- gimple stmt ATTRIBUTE_UNUSED, int flags)
+ gimple *stmt ATTRIBUTE_UNUSED, int flags)
{
/* Add virtual operands to the stmt, unless the caller has specifically
requested not to do that (used when adding operands inside an
added to virtual operands. */
static void
-add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
+add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
{
tree var = *var_p;
static void
get_mem_ref_operands (struct function *fn,
- gimple stmt, tree expr, int flags)
+ gimple *stmt, tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
static void
-get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
+get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
{
if (!(flags & opf_no_vops)
&& TREE_THIS_VOLATILE (expr))
interpret the operands found. */
static void
-get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
+get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
enum tree_code code;
enum tree_code_class codeclass;
build_* operand vectors will have potential operands in them. */
static void
-parse_ssa_operands (struct function *fn, gimple stmt)
+parse_ssa_operands (struct function *fn, gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
size_t i, n, start = 0;
/* Create an operands cache for STMT. */
static void
-build_ssa_operands (struct function *fn, gimple stmt)
+build_ssa_operands (struct function *fn, gimple *stmt)
{
/* Initially assume that the statement has no volatile operands. */
gimple_set_has_volatile_ops (stmt, false);
/* Verifies SSA statement operands. */
DEBUG_FUNCTION bool
-verify_ssa_operands (struct function *fn, gimple stmt)
+verify_ssa_operands (struct function *fn, gimple *stmt)
{
use_operand_p use_p;
def_operand_p def_p;
the stmt operand lists. */
void
-free_stmt_operands (struct function *fn, gimple stmt)
+free_stmt_operands (struct function *fn, gimple *stmt)
{
use_optype_p uses = gimple_use_ops (stmt), last_use;
/* Get the operands of statement STMT. */
void
-update_stmt_operands (struct function *fn, gimple stmt)
+update_stmt_operands (struct function *fn, gimple *stmt)
{
/* If update_stmt_operands is called before SSA is initialized, do
nothing. */
to test the validity of the swap operation. */
void
-swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
+swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
{
tree op0, op1;
op0 = *exp0;
/* Unlink STMTs virtual definition from the IL by propagating its use. */
void
-unlink_stmt_vdef (gimple stmt)
+unlink_stmt_vdef (gimple *stmt)
{
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
tree vdef = gimple_vdef (stmt);
tree vuse = gimple_vuse (stmt);
use, if so, or to NULL otherwise. */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
- use_operand_p *use_p, gimple *stmt)
+ use_operand_p *use_p, gimple **stmt)
{
ssa_use_operand_t *ptr, *single_use = 0;
extern bool ssa_operands_active (struct function *);
extern void init_ssa_operands (struct function *fn);
extern void fini_ssa_operands (struct function *);
-extern bool verify_ssa_operands (struct function *, gimple stmt);
-extern void free_stmt_operands (struct function *, gimple);
-extern void update_stmt_operands (struct function *, gimple);
-extern void swap_ssa_operands (gimple, tree *, tree *);
+extern bool verify_ssa_operands (struct function *, gimple *stmt);
+extern void free_stmt_operands (struct function *, gimple *);
+extern void update_stmt_operands (struct function *, gimple *);
+extern void swap_ssa_operands (gimple *, tree *, tree *);
extern bool verify_imm_links (FILE *f, tree var);
extern void dump_immediate_uses_for (FILE *file, tree var);
extern void debug_immediate_uses (void);
extern void debug_immediate_uses_for (tree var);
-extern void unlink_stmt_vdef (gimple);
+extern void unlink_stmt_vdef (gimple *);
/* Return the tree pointed-to by USE. */
static inline tree
remove it from the IL. */
static void
-remove_stmt_or_phi (gimple stmt)
+remove_stmt_or_phi (gimple *stmt)
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
phi, NULL is returned. */
static tree
-get_rhs_or_phi_arg (gimple stmt)
+get_rhs_or_phi_arg (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
return degenerate_phi_result (as_a <gphi *> (stmt));
return the "lhs" of the node. */
static tree
-get_lhs_or_phi_result (gimple stmt)
+get_lhs_or_phi_result (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
return gimple_phi_result (stmt);
cleaned up after changing EH information on a statement. */
static bool
-propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs,
+propagate_rhs_into_lhs (gimple *stmt, tree lhs, tree rhs,
bitmap interesting_names, bitmap need_eh_cleanup)
{
bool cfg_altered = false;
{
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
bool all = true;
/* Dump details. */
not set or queried here, but passed along to children. */
static bool
-eliminate_const_or_copy (gimple stmt, bitmap interesting_names,
+eliminate_const_or_copy (gimple *stmt, bitmap interesting_names,
bitmap need_eh_cleanup)
{
tree lhs = get_lhs_or_phi_result (stmt);
interesting_names, need_eh_cleanup);
else
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
use_operand_p use_p;
/* For virtual operands we have to propagate into all uses as
edge, edge, gphi *, tree, tree);
static bool factor_out_conditional_conversion (edge, edge, gphi *, tree, tree);
static int value_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gimple *, tree, tree);
static bool minmax_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gimple *, tree, tree);
static bool abs_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gimple *, tree, tree);
static bool cond_store_replacement (basic_block, basic_block, edge, edge,
hash_set<tree> *);
static bool cond_if_else_store_replacement (basic_block, basic_block, basic_block);
static hash_set<tree> * get_non_trapping ();
-static void replace_phi_edge_with_variable (basic_block, edge, gimple, tree);
+static void replace_phi_edge_with_variable (basic_block, edge, gimple *, tree);
static void hoist_adjacent_loads (basic_block, basic_block,
basic_block, basic_block);
static bool gate_hoist_loads (void);
for (i = 0; i < n; i++)
{
- gimple cond_stmt;
+ gimple *cond_stmt;
gphi *phi;
basic_block bb1, bb2;
edge e1, e2;
static void
replace_phi_edge_with_variable (basic_block cond_block,
- edge e, gimple phi, tree new_tree)
+ edge e, gimple *phi, tree new_tree)
{
basic_block bb = gimple_bb (phi);
basic_block block_to_remove;
factor_out_conditional_conversion (edge e0, edge e1, gphi *phi,
tree arg0, tree arg1)
{
- gimple arg0_def_stmt = NULL, arg1_def_stmt = NULL, new_stmt;
+ gimple *arg0_def_stmt = NULL, *arg1_def_stmt = NULL, *new_stmt;
tree new_arg0 = NULL_TREE, new_arg1 = NULL_TREE;
tree temp, result;
gphi *newphi;
tree arg0, tree arg1)
{
tree result;
- gimple stmt;
+ gimple *stmt;
gassign *new_stmt;
tree cond;
gimple_stmt_iterator gsi;
statement is made dead by that rewriting. */
static bool
-jump_function_from_stmt (tree *arg, gimple stmt)
+jump_function_from_stmt (tree *arg, gimple *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
if (code == ADDR_EXPR)
statement. */
if (TREE_CODE (rhs) == SSA_NAME)
{
- gimple def1 = SSA_NAME_DEF_STMT (rhs);
+ gimple *def1 = SSA_NAME_DEF_STMT (rhs);
/* Verify the defining statement has an EQ_EXPR on the RHS. */
if (is_gimple_assign (def1) && gimple_assign_rhs_code (def1) == EQ_EXPR)
static bool
operand_equal_for_value_replacement (const_tree arg0, const_tree arg1,
- enum tree_code *code, gimple cond)
+ enum tree_code *code, gimple *cond)
{
- gimple def;
+ gimple *def;
tree lhs = gimple_cond_lhs (cond);
tree rhs = gimple_cond_rhs (cond);
static int
value_replacement (basic_block cond_bb, basic_block middle_bb,
- edge e0, edge e1, gimple phi,
+ edge e0, edge e1, gimple *phi,
tree arg0, tree arg1)
{
gimple_stmt_iterator gsi;
- gimple cond;
+ gimple *cond;
edge true_edge, false_edge;
enum tree_code code;
bool emtpy_or_with_defined_p = true;
gsi = gsi_start_nondebug_after_labels_bb (middle_bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
gsi_next_nondebug (&gsi);
if (!is_gimple_assign (stmt))
/* Now optimize (x != 0) ? x + y : y to just y.
The following condition is too restrictive; there can easily be another
stmt in middle_bb, for instance a CONVERT_EXPR for the second argument. */
- gimple assign = last_and_only_stmt (middle_bb);
+ gimple *assign = last_and_only_stmt (middle_bb);
if (!assign || gimple_code (assign) != GIMPLE_ASSIGN
|| gimple_assign_rhs_class (assign) != GIMPLE_BINARY_RHS
|| (!INTEGRAL_TYPE_P (TREE_TYPE (arg0))
static bool
minmax_replacement (basic_block cond_bb, basic_block middle_bb,
- edge e0, edge e1, gimple phi,
+ edge e0, edge e1, gimple *phi,
tree arg0, tree arg1)
{
tree result, type;
b = MAX (a, d);
x = MIN (b, u); */
- gimple assign = last_and_only_stmt (middle_bb);
+ gimple *assign = last_and_only_stmt (middle_bb);
tree lhs, op0, op1, bound;
if (!assign
static bool
abs_replacement (basic_block cond_bb, basic_block middle_bb,
edge e0 ATTRIBUTE_UNUSED, edge e1,
- gimple phi, tree arg0, tree arg1)
+ gimple *phi, tree arg0, tree arg1)
{
tree result;
gassign *new_stmt;
- gimple cond;
+ gimple *cond;
gimple_stmt_iterator gsi;
edge true_edge, false_edge;
- gimple assign;
+ gimple *assign;
edge e;
tree rhs, lhs;
bool negate;
/* And walk the statements in order. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
nt_call_phase++;
cond_store_replacement (basic_block middle_bb, basic_block join_bb,
edge e0, edge e1, hash_set<tree> *nontrap)
{
- gimple assign = last_and_only_stmt (middle_bb);
+ gimple *assign = last_and_only_stmt (middle_bb);
tree lhs, rhs, name, name2;
gphi *newphi;
gassign *new_stmt;
static bool
cond_if_else_store_replacement_1 (basic_block then_bb, basic_block else_bb,
- basic_block join_bb, gimple then_assign,
- gimple else_assign)
+ basic_block join_bb, gimple *then_assign,
+ gimple *else_assign)
{
tree lhs_base, lhs, then_rhs, else_rhs, name;
source_location then_locus, else_locus;
cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
basic_block join_bb)
{
- gimple then_assign = last_and_only_stmt (then_bb);
- gimple else_assign = last_and_only_stmt (else_bb);
+ gimple *then_assign = last_and_only_stmt (then_bb);
+ gimple *else_assign = last_and_only_stmt (else_bb);
vec<data_reference_p> then_datarefs, else_datarefs;
vec<ddr_p> then_ddrs, else_ddrs;
- gimple then_store, else_store;
+ gimple *then_store, *else_store;
bool found, ok = false, res;
struct data_dependence_relation *ddr;
data_reference_p then_dr, else_dr;
}
/* Find pairs of stores with equal LHS. */
- auto_vec<gimple, 1> then_stores, else_stores;
+ auto_vec<gimple *, 1> then_stores, else_stores;
FOR_EACH_VEC_ELT (then_datarefs, i, then_dr)
{
if (DR_IS_READ (then_dr))
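The renaming also ripples into every template argument: `vec<gimple>' would now try to store statement objects by value, which the class forbids, so all containers spell the pointer explicitly. A sketch of the pattern as it recurs through this patch:

/* Containers of statements always hold pointers now.  */
auto_vec<gimple *, 1> stores;            /* vector of statement pointers */
hash_set<gimple *> visited;              /* pointer-keyed set */
hash_map<gimple *, varinfo_t> *stmt_vars; /* pointer-keyed map */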
/* Return TRUE if STMT has a VUSE whose corresponding VDEF is in BB. */
static bool
-local_mem_dependence (gimple stmt, basic_block bb)
+local_mem_dependence (gimple *stmt, basic_block bb)
{
tree vuse = gimple_vuse (stmt);
- gimple def;
+ gimple *def;
if (!vuse)
return false;
for (gsi = gsi_start_phis (bb3); !gsi_end_p (gsi); gsi_next (&gsi))
{
gphi *phi_stmt = gsi.phi ();
- gimple def1, def2;
+ gimple *def1, *def2;
tree arg1, arg2, ref1, ref2, field1, field2;
tree tree_offset1, tree_offset2, tree_size2, next;
int offset1, offset2, size2;
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator ui2;
bool ok = true;
BB with the virtual operands from USE_STMT. */
static tree
-phiprop_insert_phi (basic_block bb, gphi *phi, gimple use_stmt,
+phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
struct phiprop_d *phivn, size_t n)
{
tree res;
&& (SSA_NAME_VERSION (old_arg) >= n
|| phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (old_arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
old_arg = gimple_assign_rhs1 (def_stmt);
locus = gimple_location (def_stmt);
}
size_t n)
{
tree ptr = PHI_RESULT (phi);
- gimple use_stmt;
+ gimple *use_stmt;
tree res = NULL_TREE;
gimple_stmt_iterator gsi;
imm_use_iterator ui;
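As a reminder of what this pass does, a hand-written sketch (not from a testcase) of the indirect-load propagation phiprop performs:

/* Before:
     # p_3 = PHI <&a(E1), &b(E2)>
     x_4 = *p_3;

   After, with the loads inserted in the predecessor blocks:
     x_5 = a;                        <- in E1's source block
     x_6 = b;                        <- in E2's source block
     # x_4 = PHI <x_5(E1), x_6(E2)>  */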
&& (SSA_NAME_VERSION (arg) >= n
|| phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
if (!gimple_assign_single_p (def_stmt))
return false;
arg = gimple_assign_rhs1 (def_stmt);
phi_inserted = false;
FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
{
- gimple def_stmt;
+ gimple *def_stmt;
tree vuse;
/* Only replace loads in blocks that post-dominate the PHI node. That
basic_block phiblock,
basic_block block, bool *same_valid)
{
- gimple phi = SSA_NAME_DEF_STMT (vuse);
+ gimple *phi = SSA_NAME_DEF_STMT (vuse);
ao_ref ref;
edge e = NULL;
bool use_oracle;
case NAME:
{
tree name = PRE_EXPR_NAME (expr);
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
/* If the SSA name is defined by a PHI node in this block,
translate it. */
if (gimple_code (def_stmt) == GIMPLE_PHI
{
tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
- gimple def;
+ gimple *def;
gimple_stmt_iterator gsi;
unsigned id = get_expression_id (expr);
bool res = false;
vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
if (ref->vuse)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
if (!gimple_nop_p (def_stmt)
&& ((gimple_bb (def_stmt) != block
&& !dominated_by_p (CDI_DOMINATORS,
gsi = gsi_start (forced_stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree forcedname = gimple_get_lhs (stmt);
pre_expr nameexpr;
gsi = gsi_start (stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (TREE_CODE (lhs) == SSA_NAME)
bitmap_set_bit (inserted_exprs,
gsi = gsi_start (stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (TREE_CODE (lhs) == SSA_NAME)
bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
/* Loop until the worklist is empty. */
while (sp)
{
- gimple stmt;
+ gimple *stmt;
basic_block dom;
/* Pick a block from the worklist. */
to EXP_GEN. */
if (gimple_vuse (stmt))
{
- gimple def_stmt;
+ gimple *def_stmt;
bool ok = true;
def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
while (!gimple_nop_p (def_stmt)
/* Local state for the eliminate domwalk. */
-static vec<gimple> el_to_remove;
-static vec<gimple> el_to_fixup;
+static vec<gimple *> el_to_remove;
+static vec<gimple *> el_to_fixup;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;
if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (res), sprime);
- gimple stmt = gimple_build_assign (res, sprime);
+ gimple *stmt = gimple_build_assign (res, sprime);
/* ??? It cannot yet be necessary (DOM walk). */
gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
gsi_next (&gsi))
{
tree sprime = NULL_TREE;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (lhs && TREE_CODE (lhs) == SSA_NAME
&& !gimple_has_volatile_ops (stmt)
&& bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
&& gimple_assign_load_p (stmt))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (sprime);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
basic_block def_bb = gimple_bb (def_stmt);
if (gimple_code (def_stmt) == GIMPLE_PHI
&& b->loop_father->header == def_bb)
NECESSARY, true);
pre_stats.eliminations++;
- gimple orig_stmt = stmt;
+ gimple *orig_stmt = stmt;
if (!useless_type_conversion_p (TREE_TYPE (lhs),
TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (lhs), sprime);
if (gimple_assign_single_p (stmt)
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
- gimple old_stmt = stmt;
+ gimple *old_stmt = stmt;
if (is_gimple_call (stmt))
{
/* ??? Only fold calls inplace for now, this may create new
eliminate (bool do_pre)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
need_eh_cleanup = BITMAP_ALLOC (NULL);
need_ab_cleanup = BITMAP_ALLOC (NULL);
mark that statement necessary. Return the stmt, if it is newly
necessary. */
-static inline gimple
+static inline gimple *
mark_operand_necessary (tree op)
{
- gimple stmt;
+ gimple *stmt;
gcc_assert (op);
bitmap worklist;
unsigned i;
bitmap_iterator bi;
- gimple t;
+ gimple *t;
worklist = BITMAP_ALLOC (NULL);
EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
tree arg = PHI_ARG_DEF (t, k);
if (TREE_CODE (arg) == SSA_NAME)
{
- gimple n = mark_operand_necessary (arg);
+ gimple *n = mark_operand_necessary (arg);
if (n)
bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
}
FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
{
- gimple n = mark_operand_necessary (use);
+ gimple *n = mark_operand_necessary (use);
if (n)
bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
}
definition has changed. SSA edges are def-use edges in the SSA
web. For each D-U edge, we store the target statement or PHI node
U. */
-static vec<gimple> interesting_ssa_edges;
+static vec<gimple *> interesting_ssa_edges;
/* Identical to INTERESTING_SSA_EDGES. For performance reasons, the
list of SSA edges is split into two. One contains all SSA edges
don't use a separate worklist for VARYING edges, we end up with
situations where lattice values move from
UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */
-static vec<gimple> varying_ssa_edges;
+static vec<gimple *> varying_ssa_edges;
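For readers new to the propagation engine, a small invented example of one such D-U edge:

/* a_2 = b_1 + 1;     <- definition of a_2
   c_3 = a_2 * 2;     <- use; the pair (a_2, this stmt) is one SSA edge.
   When a_2's lattice value changes, the multiply is pushed onto one
   of the two worklists above so it gets simulated again.  */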
/* Return true if the block worklist is empty. */
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (prop_simulate_again_p (use_stmt)
&& !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
/* Simulate the execution of STMT and update the work lists accordingly. */
static void
-simulate_stmt (gimple stmt)
+simulate_stmt (gimple *stmt)
{
enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
edge taken_edge = NULL;
else
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
+ gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
if (!gimple_nop_p (def_stmt)
&& prop_simulate_again_p (def_stmt))
{
was simulated. */
static bool
-process_ssa_edge_worklist (vec<gimple> *worklist, const char *edge_list_name)
+process_ssa_edge_worklist (vec<gimple *> *worklist, const char *edge_list_name)
{
/* Process the next entry from the worklist. */
while (worklist->length () > 0)
basic_block bb;
/* Pull the statement to simulate off the worklist. */
- gimple stmt = worklist->pop ();
+ gimple *stmt = worklist->pop ();
/* If this statement was already visited by simulate_block, then
we don't need to visit it again here. */
for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
{
- gimple stmt = gsi_stmt (j);
+ gimple *stmt = gsi_stmt (j);
/* If this statement is already in the worklist then
"cancel" it. The reevaluation implied by the worklist
as their defining statement. */
void
-move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
+move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
tree var;
ssa_op_iter iter;
A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT. */
static void
-finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
- gimple stmt)
+finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
+ gimple *stmt)
{
gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
move_ssa_defining_stmt_for_defs (new_stmt, stmt);
bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
- gimple stmt = gsi_stmt (*si_p);
+ gimple *stmt = gsi_stmt (*si_p);
if (valid_gimple_call_p (expr))
{
else if (valid_gimple_rhs_p (expr))
{
tree lhs = gimple_call_lhs (stmt);
- gimple new_stmt;
+ gimple *new_stmt;
/* The call has simplified to an expression
that cannot be represented as a GIMPLE_CALL. */
because they are not interesting for the optimizers. */
bool
-stmt_makes_single_store (gimple stmt)
+stmt_makes_single_store (gimple *stmt)
{
tree lhs;
PROP_VALUE. Return true if at least one reference was replaced. */
static bool
-replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
+replace_uses_in (gimple *stmt, ssa_prop_get_value_fn get_value)
{
bool replaced = false;
use_operand_p use;
ssa_prop_fold_stmt_fn fold_fn;
bool do_dce;
bool something_changed;
- vec<gimple> stmts_to_remove;
- vec<gimple> stmts_to_fixup;
+ vec<gimple *> stmts_to_remove;
+ vec<gimple *> stmts_to_fixup;
bitmap need_eh_cleanup;
};
gsi_next (&i))
{
bool did_replace;
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
enum gimple_code code = gimple_code (stmt);
/* Ignore ASSERT_EXPRs. They are used by VRP to generate
print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
}
- gimple old_stmt = stmt;
+ gimple *old_stmt = stmt;
bool was_noreturn = (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt));
Remove stmts in reverse order to make debug stmt creation possible. */
while (!walker.stmts_to_remove.is_empty ())
{
- gimple stmt = walker.stmts_to_remove.pop ();
+ gimple *stmt = walker.stmts_to_remove.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Removing dead stmt ");
fixup by visiting a dominating now noreturn call first. */
while (!walker.stmts_to_fixup.is_empty ())
{
- gimple stmt = walker.stmts_to_fixup.pop ();
+ gimple *stmt = walker.stmts_to_fixup.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Fixing up noreturn call ");
gimple tuples representation. */
bool
-may_propagate_copy_into_stmt (gimple dest, tree orig)
+may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
tree type_d;
tree type_o;
void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_assign (stmt))
{
/* If SIM_P is true, statement S will be simulated again. */
static inline void
-prop_set_simulate_again (gimple s, bool visit_p)
+prop_set_simulate_again (gimple *s, bool visit_p)
{
gimple_set_visited (s, visit_p);
}
/* Return true if statement T should be simulated again. */
static inline bool
-prop_simulate_again_p (gimple s)
+prop_simulate_again_p (gimple *s)
{
return gimple_visited_p (s);
}
/* Call-back functions used by the value propagation engine. */
-typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (gimple, edge *, tree *);
+typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (gimple *, edge *,
+ tree *);
typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (gphi *);
typedef bool (*ssa_prop_fold_stmt_fn) (gimple_stmt_iterator *gsi);
typedef tree (*ssa_prop_get_value_fn) (tree);
extern bool valid_gimple_rhs_p (tree);
-extern void move_ssa_defining_stmt_for_defs (gimple, gimple);
+extern void move_ssa_defining_stmt_for_defs (gimple *, gimple *);
extern bool update_gimple_call (gimple_stmt_iterator *, tree, int, ...);
extern bool update_call_from_tree (gimple_stmt_iterator *, tree);
extern void ssa_propagate (ssa_prop_visit_stmt_fn, ssa_prop_visit_phi_fn);
-extern bool stmt_makes_single_store (gimple);
+extern bool stmt_makes_single_store (gimple *);
extern bool substitute_and_fold (ssa_prop_get_value_fn, ssa_prop_fold_stmt_fn,
bool);
extern bool may_propagate_copy (tree, tree);
-extern bool may_propagate_copy_into_stmt (gimple, tree);
+extern bool may_propagate_copy_into_stmt (gimple *, tree);
extern bool may_propagate_copy_into_asm (tree);
extern void propagate_value (use_operand_p, tree);
extern void replace_exp (use_operand_p, tree);
/* Forward decls. */
static long get_rank (tree);
-static bool reassoc_stmt_dominates_stmt_p (gimple, gimple);
+static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *);
/* Wrapper around gsi_remove, which adjusts gimple_uid of debug stmts
possibly added by gsi_remove. */
bool
reassoc_remove_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (!MAY_HAVE_DEBUG_STMTS || gimple_code (stmt) == GIMPLE_PHI)
return gsi_remove (gsi, true);
gsi_next (&prev);
else
prev = gsi_start_bb (bb);
- gimple end_stmt = gsi_stmt (*gsi);
+ gimple *end_stmt = gsi_stmt (*gsi);
while ((stmt = gsi_stmt (prev)) != end_stmt)
{
gcc_assert (stmt && is_gimple_debug (stmt) && gimple_uid (stmt) == 0);
iteration of the loop. If STMT is some other phi, the rank is the
block rank of its containing block. */
static long
-phi_rank (gimple stmt)
+phi_rank (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
struct loop *father = bb->loop_father;
tree res;
unsigned i;
use_operand_p use;
- gimple use_stmt;
+ gimple *use_stmt;
/* We only care about real loops (those with a latch). */
if (!father->latch)
if (TREE_CODE (arg) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (arg))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
if (gimple_bb (def_stmt)->loop_father == father)
return bb_rank[father->latch->index] + PHI_LOOP_BIAS;
}
static bool
loop_carried_phi (tree exp)
{
- gimple phi_stmt;
+ gimple *phi_stmt;
long block_rank;
if (TREE_CODE (exp) != SSA_NAME
if (TREE_CODE (e) == SSA_NAME)
{
ssa_op_iter iter;
- gimple stmt;
+ gimple *stmt;
long rank;
tree op;
&& !SSA_NAME_IS_DEFAULT_DEF (oeb->op)
&& SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
{
- gimple stmta = SSA_NAME_DEF_STMT (oea->op);
- gimple stmtb = SSA_NAME_DEF_STMT (oeb->op);
+ gimple *stmta = SSA_NAME_DEF_STMT (oea->op);
+ gimple *stmtb = SSA_NAME_DEF_STMT (oeb->op);
basic_block bba = gimple_bb (stmta);
basic_block bbb = gimple_bb (stmtb);
if (bbb != bba)
operation with tree code CODE, and is inside LOOP. */
static bool
-is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
+is_reassociable_op (gimple *stmt, enum tree_code code, struct loop *loop)
{
basic_block bb = gimple_bb (stmt);
static tree
get_unary_op (tree name, enum tree_code opcode)
{
- gimple stmt = SSA_NAME_DEF_STMT (name);
+ gimple *stmt = SSA_NAME_DEF_STMT (name);
if (!is_gimple_assign (stmt))
return NULL_TREE;
}
-static void linearize_expr_tree (vec<operand_entry_t> *, gimple,
+static void linearize_expr_tree (vec<operand_entry_t> *, gimple *,
bool, bool);
/* Structure for tracking and counting operands. */
to some exponent. */
static bool
-stmt_is_power_of_op (gimple stmt, tree op)
+stmt_is_power_of_op (gimple *stmt, tree op)
{
tree fndecl;
was previously called for STMT and returned TRUE. */
static HOST_WIDE_INT
-decrement_power (gimple stmt)
+decrement_power (gimple *stmt)
{
REAL_VALUE_TYPE c, cint;
HOST_WIDE_INT power;
replace *DEF with OP as well. */
static void
-propagate_op_to_single_use (tree op, gimple stmt, tree *def)
+propagate_op_to_single_use (tree op, gimple *stmt, tree *def)
{
tree lhs;
- gimple use_stmt;
+ gimple *use_stmt;
use_operand_p use;
gimple_stmt_iterator gsi;
static void
zero_one_operation (tree *def, enum tree_code opcode, tree op)
{
- gimple stmt = SSA_NAME_DEF_STMT (*def);
+ gimple *stmt = SSA_NAME_DEF_STMT (*def);
do
{
&& TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
&& has_single_use (gimple_assign_rhs2 (stmt)))
{
- gimple stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+ gimple *stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
if (stmt_is_power_of_op (stmt2, op))
{
if (decrement_power (stmt2) == 1)
stmt_dominates_stmt_p, but uses stmt UIDs to optimize. */
static bool
-reassoc_stmt_dominates_stmt_p (gimple s1, gimple s2)
+reassoc_stmt_dominates_stmt_p (gimple *s1, gimple *s2)
{
basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
unsigned int uid = gimple_uid (s1);
for (gsi_next (&gsi); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple s = gsi_stmt (gsi);
+ gimple *s = gsi_stmt (gsi);
if (gimple_uid (s) != uid)
break;
if (s == s2)
/* Insert STMT after INSERT_POINT. */
static void
-insert_stmt_after (gimple stmt, gimple insert_point)
+insert_stmt_after (gimple *stmt, gimple *insert_point)
{
gimple_stmt_iterator gsi;
basic_block bb;
the result. Places the statement after the definition of either
OP1 or OP2. Returns the new statement. */
-static gimple
+static gimple *
build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
{
- gimple op1def = NULL, op2def = NULL;
+ gimple *op1def = NULL, *op2def = NULL;
gimple_stmt_iterator gsi;
tree op;
gassign *sum;
}
else
{
- gimple insert_point;
+ gimple *insert_point;
if ((!op1def || gimple_nop_p (op1def))
|| (op2def && !gimple_nop_p (op2def)
&& reassoc_stmt_dominates_stmt_p (op1def, op2def)))
FOR_EACH_VEC_ELT (*ops, i, oe1)
{
enum tree_code dcode;
- gimple oe1def;
+ gimple *oe1def;
if (TREE_CODE (oe1->op) != SSA_NAME)
continue;
subops = XCNEWVEC (vec_operand_entry_t_heap, ops->length ());
EXECUTE_IF_SET_IN_BITMAP (candidates, 0, i, sbi0)
{
- gimple oedef;
+ gimple *oedef;
enum tree_code oecode;
unsigned j;
nr_candidates2 = 0;
EXECUTE_IF_SET_IN_BITMAP (candidates, 0, i, sbi0)
{
- gimple oedef;
+ gimple *oedef;
enum tree_code oecode;
unsigned j;
tree op = (*ops)[i]->op;
if (nr_candidates2 >= 2)
{
operand_entry_t oe1, oe2;
- gimple prod;
+ gimple *prod;
int first = bitmap_first_set_bit (candidates2);
/* Build the new addition chain. */
zero_one_operation (&oe1->op, c->oecode, c->op);
EXECUTE_IF_SET_IN_BITMAP (candidates2, first+1, i, sbi0)
{
- gimple sum;
+ gimple *sum;
oe2 = (*ops)[i];
if (dump_file && (dump_flags & TDF_DETAILS))
{
{
tree op1, op2;
enum tree_code lcode, rcode;
- gimple def1, def2;
+ gimple *def1, *def2;
int i;
operand_entry_t oe;
}
else if (!operand_equal_p (t, curr->op, 0))
{
- gimple sum;
+ gimple *sum;
enum tree_code subcode;
tree newop1;
tree newop2;
argument should be a GIMPLE_COND. */
static void
-init_range_entry (struct range_entry *r, tree exp, gimple stmt)
+init_range_entry (struct range_entry *r, tree exp, gimple *stmt)
{
int in_p;
tree low, high;
{
operand_entry_t oe = (*ops)[range->idx];
tree op = oe->op;
- gimple stmt = op ? SSA_NAME_DEF_STMT (op) :
+ gimple *stmt = op ? SSA_NAME_DEF_STMT (op) :
last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
location_t loc = gimple_location (stmt);
tree optype = op ? TREE_TYPE (op) : boolean_type_node;
+ prec - 1 - wi::clz (mask));
operand_entry_t oe = (*ops)[ranges[i].idx];
tree op = oe->op;
- gimple stmt = op ? SSA_NAME_DEF_STMT (op)
+ gimple *stmt = op ? SSA_NAME_DEF_STMT (op)
: last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
location_t loc = gimple_location (stmt);
tree optype = op ? TREE_TYPE (op) : boolean_type_node;
gimple_seq_add_seq_without_update (&seq, seq2);
gcc_assert (TREE_CODE (exp) == SSA_NAME);
gimple_set_visited (SSA_NAME_DEF_STMT (exp), true);
- gimple g = gimple_build_assign (make_ssa_name (optype),
+ gimple *g = gimple_build_assign (make_ssa_name (optype),
BIT_IOR_EXPR, tem, exp);
gimple_set_location (g, loc);
gimple_seq_add_stmt_without_update (&seq, g);
the last block of a range test. */
static bool
-final_range_test_p (gimple stmt)
+final_range_test_p (gimple *stmt)
{
basic_block bb, rhs_bb;
edge e;
tree lhs, rhs;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (!gimple_assign_cast_p (stmt))
return false;
{
edge_iterator ei, ei2;
edge e, e2;
- gimple stmt;
+ gimple *stmt;
gphi_iterator gsi;
bool other_edge_seen = false;
bool is_cond;
}
else
{
- gimple test_last = last_stmt (test_bb);
+ gimple *test_last = last_stmt (test_bb);
if (gimple_code (test_last) != GIMPLE_COND
&& gimple_phi_arg_def (phi, e2->dest_idx)
== gimple_assign_lhs (test_last)
no_side_effect_bb (basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple last;
+ gimple *last;
if (!gimple_seq_empty_p (phi_nodes (bb)))
return false;
last = last_stmt (bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
imm_use_iterator imm_iter;
use_operand_p use_p;
return false;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (gimple_bb (use_stmt) != bb)
get_ops (tree var, enum tree_code code, vec<operand_entry_t> *ops,
struct loop *loop)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
tree rhs[2];
int i;
update_ops (tree var, enum tree_code code, vec<operand_entry_t> ops,
unsigned int *pidx, struct loop *loop)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
tree rhs[4];
int i;
/* Inter-bb range test optimization. */
static void
-maybe_optimize_range_tests (gimple stmt)
+maybe_optimize_range_tests (gimple *stmt)
{
basic_block first_bb = gimple_bb (stmt);
basic_block last_bb = first_bb;
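A sketch of the multi-block test chain this function merges; the names and bounds are invented:

/* bb1:  if (a_1 < 0) goto out; else goto bb2;
   bb2:  if (a_1 > 9) goto out; else goto in;

   is combined into one unsigned range test:

   bb1:  t_2 = (unsigned) a_1;
         if (t_2 > 9) goto out; else goto in;  */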
if (gimple_code (stmt) != GIMPLE_COND)
{
use_operand_p use_p;
- gimple phi;
+ gimple *phi;
edge e2;
unsigned int d;
{
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt, cast_stmt = NULL;
+ gimple *use_stmt, *cast_stmt = NULL;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, bbinfo[idx].op)
if (is_gimple_debug (use_stmt))
update" operation. */
static bool
-is_phi_for_stmt (gimple stmt, tree operand)
+is_phi_for_stmt (gimple *stmt, tree operand)
{
- gimple def_stmt;
+ gimple *def_stmt;
gphi *def_phi;
tree lhs;
use_operand_p arg_p;
static void
remove_visited_stmt_chain (tree var)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
while (1)
static void
swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
- unsigned int opindex, gimple stmt)
+ unsigned int opindex, gimple *stmt)
{
operand_entry_t oe1, oe2, oe3;
/* If definition of RHS1 or RHS2 dominates STMT, return the later of those
two definitions, otherwise return STMT. */
-static inline gimple
-find_insert_point (gimple stmt, tree rhs1, tree rhs2)
+static inline gimple *
+find_insert_point (gimple *stmt, tree rhs1, tree rhs2)
{
if (TREE_CODE (rhs1) == SSA_NAME
&& reassoc_stmt_dominates_stmt_p (stmt, SSA_NAME_DEF_STMT (rhs1)))
order. Return new lhs. */
static tree
-rewrite_expr_tree (gimple stmt, unsigned int opindex,
+rewrite_expr_tree (gimple *stmt, unsigned int opindex,
vec<operand_entry_t> ops, bool changed)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
return lhs), force creation of a new SSA_NAME. */
if (changed || ((rhs1 != oe2->op || rhs2 != oe1->op) && opindex))
{
- gimple insert_point = find_insert_point (stmt, oe1->op, oe2->op);
+ gimple *insert_point
+ = find_insert_point (stmt, oe1->op, oe2->op);
lhs = make_ssa_name (TREE_TYPE (lhs));
stmt
= gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
unsigned int uid = gimple_uid (stmt);
- gimple insert_point = find_insert_point (stmt, new_rhs1, oe->op);
+ gimple *insert_point = find_insert_point (stmt, new_rhs1, oe->op);
lhs = make_ssa_name (TREE_TYPE (lhs));
stmt = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
enum tree_code opcode = gimple_assign_rhs_code (stmt);
int op_num = ops.length ();
int stmt_num = op_num - 1;
- gimple *stmts = XALLOCAVEC (gimple, stmt_num);
+ gimple **stmts = XALLOCAVEC (gimple *, stmt_num);
int op_index = op_num - 1;
int stmt_index = 0;
int ready_stmts_end = 0;
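This hunk is the one place the rename changes two tokens at once, an on-stack array of statement pointers; a sketch of the before/after reading:

/* Before: `gimple' was the pointer typedef, so this already declared
   an array of statement pointers.  */
gimple *stmts = XALLOCAVEC (gimple, stmt_num);

/* After: the element type is spelled gimple *, and the array
   variable becomes a pointer to pointer.  */
gimple **stmts = XALLOCAVEC (gimple *, stmt_num);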
Recurse on D if necessary. */
static void
-linearize_expr (gimple stmt)
+linearize_expr (gimple *stmt)
{
gimple_stmt_iterator gsi;
- gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
- gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
- gimple oldbinrhs = binrhs;
+ gimple *binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+ gimple *binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+ gimple *oldbinrhs = binrhs;
enum tree_code rhscode = gimple_assign_rhs_code (stmt);
- gimple newbinrhs = NULL;
+ gimple *newbinrhs = NULL;
struct loop *loop = loop_containing_stmt (stmt);
tree lhs = gimple_assign_lhs (stmt);
/* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
it. Otherwise, return NULL. */
-static gimple
+static gimple *
get_single_immediate_use (tree lhs)
{
use_operand_p immuse;
- gimple immusestmt;
+ gimple *immusestmt;
if (TREE_CODE (lhs) == SSA_NAME
&& single_imm_use (lhs, &immuse, &immusestmt)
static tree
negate_value (tree tonegate, gimple_stmt_iterator *gsip)
{
- gimple negatedefstmt = NULL;
+ gimple *negatedefstmt = NULL;
tree resultofnegate;
gimple_stmt_iterator gsi;
unsigned int uid;
tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
tree rhs2 = gimple_assign_rhs2 (negatedefstmt);
tree lhs = gimple_assign_lhs (negatedefstmt);
- gimple g;
+ gimple *g;
gsi = gsi_for_stmt (negatedefstmt);
rhs1 = negate_value (rhs1, &gsi);
uid = gimple_uid (gsi_stmt (gsi));
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_uid (stmt) != 0)
break;
gimple_set_uid (stmt, uid);
exposes the adds to reassociation. */
static bool
-should_break_up_subtract (gimple stmt)
+should_break_up_subtract (gimple *stmt)
{
tree lhs = gimple_assign_lhs (stmt);
tree binlhs = gimple_assign_rhs1 (stmt);
tree binrhs = gimple_assign_rhs2 (stmt);
- gimple immusestmt;
+ gimple *immusestmt;
struct loop *loop = loop_containing_stmt (stmt);
if (TREE_CODE (binlhs) == SSA_NAME
/* Transform STMT from A - B into A + -B. */
static void
-break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
+break_up_subtract (gimple *stmt, gimple_stmt_iterator *gsip)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
tree rhs2 = gimple_assign_rhs2 (stmt);
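The rewrite itself, sketched at the GIMPLE level with invented SSA names (negate_value supplies the inserted negation):

/* x_3 = a_1 - b_2;     <- input

   becomes

   t_4 = -b_2;          <- built by negate_value
   x_3 = a_1 + t_4;     <- rhs updated in place

   which exposes the addition to the reassociation machinery.  */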
If any of these conditions does not hold, return FALSE. */
static bool
-acceptable_pow_call (gimple stmt, tree *base, HOST_WIDE_INT *exponent)
+acceptable_pow_call (gimple *stmt, tree *base, HOST_WIDE_INT *exponent)
{
tree fndecl, arg1;
REAL_VALUE_TYPE c, cint;
Place the operands of the expression tree in the vector named OPS. */
static void
-linearize_expr_tree (vec<operand_entry_t> *ops, gimple stmt,
+linearize_expr_tree (vec<operand_entry_t> *ops, gimple *stmt,
bool is_associative, bool set_visited)
{
tree binlhs = gimple_assign_rhs1 (stmt);
tree binrhs = gimple_assign_rhs2 (stmt);
- gimple binlhsdef = NULL, binrhsdef = NULL;
+ gimple *binlhsdef = NULL, *binrhsdef = NULL;
bool binlhsisreassoc = false;
bool binrhsisreassoc = false;
enum tree_code rhscode = gimple_assign_rhs_code (stmt);
FOR_EACH_VEC_ELT (plus_negates, i, negate)
{
- gimple user = get_single_immediate_use (negate);
+ gimple *user = get_single_immediate_use (negate);
if (!user || !is_gimple_assign (user))
continue;
This pushes down the negate which we possibly can merge
into some other operation, hence insert it into the
plus_negates vector. */
- gimple feed = SSA_NAME_DEF_STMT (negate);
+ gimple *feed = SSA_NAME_DEF_STMT (negate);
tree a = gimple_assign_rhs1 (feed);
tree b = gimple_assign_rhs2 (user);
gimple_stmt_iterator gsi = gsi_for_stmt (feed);
gimple_stmt_iterator gsi2 = gsi_for_stmt (user);
tree x = make_ssa_name (TREE_TYPE (gimple_assign_lhs (feed)));
- gimple g = gimple_build_assign (x, PLUS_EXPR, a, b);
+ gimple *g = gimple_build_assign (x, PLUS_EXPR, a, b);
gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, x);
user = gsi_stmt (gsi2);
{
/* Transform "x = -a; y = b - x" into "y = b + a", getting
rid of one operation. */
- gimple feed = SSA_NAME_DEF_STMT (negate);
+ gimple *feed = SSA_NAME_DEF_STMT (negate);
tree a = gimple_assign_rhs1 (feed);
tree rhs1 = gimple_assign_rhs1 (user);
gimple_stmt_iterator gsi = gsi_for_stmt (user);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_visited (stmt, false);
gimple_set_uid (stmt, uid++);
SSA name representing the value of the replacement sequence. */
static tree
-attempt_builtin_powi (gimple stmt, vec<operand_entry_t> *ops)
+attempt_builtin_powi (gimple *stmt, vec<operand_entry_t> *ops)
{
unsigned i, j, vec_len;
int ii;
tree type = TREE_TYPE (gimple_get_lhs (stmt));
tree powi_fndecl = mathfn_built_in (type, BUILT_IN_POWI);
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- gimple mul_stmt, pow_stmt;
+ gimple *mul_stmt, *pow_stmt;
/* Nothing to do if BUILT_IN_POWI doesn't exist for this type and
target. */
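For orientation, the kind of replacement this function builds; the exponent and names are invented:

/* t_5 = x_1 * x_1;
   t_6 = t_5 * x_1;
   t_7 = t_6 * x_1;      <- four repeated factors of x_1

   collapses into a single call:

   t_7 = __builtin_powi (x_1, 4);  */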
/* Transform STMT at *GSI into a copy by replacing its rhs with NEW_RHS. */
static void
-transform_stmt_to_copy (gimple_stmt_iterator *gsi, gimple stmt, tree new_rhs)
+transform_stmt_to_copy (gimple_stmt_iterator *gsi, gimple *stmt, tree new_rhs)
{
tree rhs1;
/* Transform STMT at *GSI into a multiply of RHS1 and RHS2. */
static void
-transform_stmt_to_multiply (gimple_stmt_iterator *gsi, gimple stmt,
+transform_stmt_to_multiply (gimple_stmt_iterator *gsi, gimple *stmt,
tree rhs1, tree rhs2)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
gimple_stmt_iterator gsi;
basic_block son;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && !gimple_visited_p (stmt))
maybe_optimize_range_tests (stmt);
reassociated operands. */
if (powi_result)
{
- gimple mul_stmt, lhs_stmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *mul_stmt, *lhs_stmt = SSA_NAME_DEF_STMT (lhs);
tree type = TREE_TYPE (lhs);
tree target_ssa = make_temp_ssa_name (type, NULL,
"reassocpow");
FOR_EACH_VEC_ELT (reassoc_branch_fixups, i, var)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (var);
- gimple use_stmt;
+ gimple *def_stmt = SSA_NAME_DEF_STMT (var);
+ gimple *use_stmt;
use_operand_p use;
bool ok = single_imm_use (var, &use, &use_stmt);
gcc_assert (ok
basic_block merge_bb = split_block (then_bb, use_stmt)->dest;
gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
- gimple g = gimple_build_cond (NE_EXPR, var,
- build_zero_cst (TREE_TYPE (var)),
- NULL_TREE, NULL_TREE);
+ gimple *g = gimple_build_cond (NE_EXPR, var,
+ build_zero_cst (TREE_TYPE (var)),
+ NULL_TREE, NULL_TREE);
location_t loc = gimple_location (use_stmt);
gimple_set_location (g, loc);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
vn_get_expr_for (tree name)
{
vn_ssa_aux_t vn = VN_INFO (name);
- gimple def_stmt;
+ gimple *def_stmt;
tree expr = NULL_TREE;
enum tree_code code;
associated with. */
enum vn_kind
-vn_get_stmt_kind (gimple stmt)
+vn_get_stmt_kind (gimple *stmt)
{
switch (gimple_code (stmt))
{
unsigned int i = *i_p;
vn_reference_op_t op = &(*ops)[i];
vn_reference_op_t mem_op = &(*ops)[i - 1];
- gimple def_stmt;
+ gimple *def_stmt;
enum tree_code code;
offset_int off;
bool disambiguate_only)
{
vn_reference_t vr = (vn_reference_t)vr_;
- gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base;
HOST_WIDE_INT offset, maxsize;
static vec<vn_reference_op_s>
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
{
tree rhs1 = gimple_assign_rhs1 (def_stmt);
- gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
+ gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
if (is_gimple_assign (def_stmt2)
&& (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
|| gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
lhs = SSA_VAL (lhs);
if (TREE_CODE (lhs) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
if (gimple_assign_single_p (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
lhs = gimple_assign_rhs1 (def_stmt);
/* Return the number of operands for a vn_nary ops structure from STMT. */
static unsigned int
-vn_nary_length_from_stmt (gimple stmt)
+vn_nary_length_from_stmt (gimple *stmt)
{
switch (gimple_assign_rhs_code (stmt))
{
/* Initialize VNO from STMT. */
static void
-init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
+init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
unsigned i;
vn_nary_op_t from the hashtable if it exists. */
tree
-vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
+vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s,
RESULT. */
vn_nary_op_t
-vn_nary_op_insert_stmt (gimple stmt, tree result)
+vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
vn_nary_op_t vno1
= alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
it does not exist in the hash table. */
static tree
-vn_phi_lookup (gimple phi)
+vn_phi_lookup (gimple *phi)
{
vn_phi_s **slot;
struct vn_phi_s vp1;
RESULT. */
static vn_phi_t
-vn_phi_insert (gimple phi, tree result)
+vn_phi_insert (gimple *phi, tree result)
{
vn_phi_s **slot;
vn_phi_t vp1 = current_info->phis_pool->allocate ();
{
ssa_op_iter iter;
def_operand_p defp;
- gimple stmt = SSA_NAME_DEF_STMT (use);
+ gimple *stmt = SSA_NAME_DEF_STMT (use);
if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
{
Return true if a value number changed. */
static bool
-defs_to_varying (gimple stmt)
+defs_to_varying (gimple *stmt)
{
bool changed = false;
ssa_op_iter iter;
value number of LHS has changed as a result. */
static bool
-visit_nary_op (tree lhs, gimple stmt)
+visit_nary_op (tree lhs, gimple *stmt)
{
bool changed = false;
tree result = vn_nary_op_lookup_stmt (stmt, NULL);
and return true if the value number of the LHS has changed as a result. */
static bool
-visit_reference_op_load (tree lhs, tree op, gimple stmt)
+visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
bool changed = false;
tree last_vuse;
and return true if the value number of the LHS has changed as a result. */
static bool
-visit_reference_op_store (tree lhs, tree op, gimple stmt)
+visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
bool changed = false;
vn_reference_t vnresult = NULL;
changed. */
static bool
-visit_phi (gimple phi)
+visit_phi (gimple *phi)
{
bool changed = false;
tree result;
/* Return true if STMT contains constants. */
static bool
-stmt_has_constants (gimple stmt)
+stmt_has_constants (gimple *stmt)
{
tree tem;
simplified. */
static tree
-simplify_binary_expression (gimple stmt)
+simplify_binary_expression (gimple *stmt)
{
tree result = NULL_TREE;
tree op0 = gimple_assign_rhs1 (stmt);
visit_use (tree use)
{
bool changed = false;
- gimple stmt = SSA_NAME_DEF_STMT (use);
+ gimple *stmt = SSA_NAME_DEF_STMT (use);
mark_use_processed (use);
{
const tree opa = *((const tree *)pa);
const tree opb = *((const tree *)pb);
- gimple opstmta = SSA_NAME_DEF_STMT (opa);
- gimple opstmtb = SSA_NAME_DEF_STMT (opb);
+ gimple *opstmta = SSA_NAME_DEF_STMT (opa);
+ gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
basic_block bba;
basic_block bbb;
vec<ssa_op_iter> itervec = vNULL;
vec<tree> namevec = vNULL;
use_operand_p usep = NULL;
- gimple defstmt;
+ gimple *defstmt;
tree use;
ssa_op_iter iter;
break;
if (e2 && (e2->flags & EDGE_EXECUTABLE))
{
- gimple stmt = last_stmt (e->src);
+ gimple *stmt = last_stmt (e->src);
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
{
}
/* Finally look at the last stmt. */
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (!stmt)
return;
} *vn_constant_t;
enum vn_kind { VN_NONE, VN_CONSTANT, VN_NARY, VN_REFERENCE, VN_PHI };
-enum vn_kind vn_get_stmt_kind (gimple);
+enum vn_kind vn_get_stmt_kind (gimple *);
/* Hash the type TYPE using bits that distinguish it in the
types_compatible_p sense. */
bool run_scc_vn (vn_lookup_kind);
void free_scc_vn (void);
tree vn_nary_op_lookup (tree, vn_nary_op_t *);
-tree vn_nary_op_lookup_stmt (gimple, vn_nary_op_t *);
+tree vn_nary_op_lookup_stmt (gimple *, vn_nary_op_t *);
tree vn_nary_op_lookup_pieces (unsigned int, enum tree_code,
tree, tree *, vn_nary_op_t *);
vn_nary_op_t vn_nary_op_insert (tree, tree);
/* Given a statement STMT, construct a hash table element. */
-expr_hash_elt::expr_hash_elt (gimple stmt, tree orig_lhs)
+expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
enum gimple_code code = gimple_code (stmt);
struct hashable_expr *expr = this->expr ();
class expr_hash_elt
{
public:
- expr_hash_elt (gimple, tree);
+ expr_hash_elt (gimple *, tree);
expr_hash_elt (tree);
expr_hash_elt (struct hashable_expr *, tree);
expr_hash_elt (class expr_hash_elt &);
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple firstuse = NULL;
+ gimple *firstuse = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
{
if (is_gimple_debug (USE_STMT (use_p)))
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
{
- gimple usestmt = USE_STMT (use_p);
+ gimple *usestmt = USE_STMT (use_p);
basic_block useblock;
if (gphi *phi = dyn_cast <gphi *> (usestmt))
static basic_block
select_best_block (basic_block early_bb,
basic_block late_bb,
- gimple stmt)
+ gimple *stmt)
{
basic_block best_bb = late_bb;
basic_block temp_bb = late_bb;
statement before that STMT should be moved. */
static bool
-statement_sink_location (gimple stmt, basic_block frombb,
+statement_sink_location (gimple *stmt, basic_block frombb,
gimple_stmt_iterator *togsi)
{
- gimple use;
+ gimple *use;
use_operand_p one_use = NULL_USE_OPERAND_P;
basic_block sinkbb;
use_operand_p use_p;
{
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
/* A killing definition is not a use. */
if ((gimple_has_lhs (use_stmt)
basic_block found = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_vuse (stmt))
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
basic_block bb = gimple_bb (use_stmt);
/* For PHI nodes the block we know sth about
is the incoming block with the use. */
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_stmt_iterator togsi;
if (!statement_sink_location (stmt, bb, &togsi))
{
imm_use_iterator iter;
use_operand_p use_p;
- gimple vuse_stmt;
+ gimple *vuse_stmt;
FOR_EACH_IMM_USE_STMT (vuse_stmt, iter, gimple_vdef (stmt))
if (gimple_code (vuse_stmt) != GIMPLE_PHI)
/* Any of the corresponding pointers for querying alias oracle. */
tree ptr;
/* Statement for delayed length computation. */
- gimple stmt;
+ gimple *stmt;
/* Pointer to '\0' if known, if NULL, it can be computed as
ptr + length. */
tree endptr;
*x = '\0' store that could be removed if it is immediately overwritten. */
struct laststmt_struct
{
- gimple stmt;
+ gimple *stmt;
tree len;
int stridx;
} laststmt;
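The point of remembering this statement, sketched at the source level (an invented snippet, not a testcase):

/* strcpy (buf, "ab");   <- also stores the terminating '\0'
   strcat (buf, "cd");   <- immediately overwrites that '\0'

   Tracking the strcpy in `laststmt' lets adjust_last_stmt shrink or
   remove the now-dead terminating store when the strcat is seen.  */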
HOST_WIDE_INT off = 0;
for (i = 0; i < 5; i++)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (e);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (e);
if (!is_gimple_assign (def_stmt)
|| gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR)
return 0;
if (si->stmt)
{
- gimple stmt = si->stmt, lenstmt;
+ gimple *stmt = si->stmt, *lenstmt;
bool with_bounds = gimple_call_with_bounds_p (stmt);
tree callee, lhs, fn, tem;
location_t loc;
might change due to stores in stmt. */
static bool
-maybe_invalidate (gimple stmt)
+maybe_invalidate (gimple *stmt)
{
strinfo si;
unsigned int i;
return;
while (1)
{
- gimple stmt = SSA_NAME_DEF_STMT (ptr);
+ gimple *stmt = SSA_NAME_DEF_STMT (ptr);
if (!is_gimple_assign (stmt))
return;
ptr = gimple_assign_rhs1 (stmt);
strinfo. */
static void
-adjust_last_stmt (strinfo si, gimple stmt, bool is_strcat)
+adjust_last_stmt (strinfo si, gimple *stmt, bool is_strcat)
{
tree vuse, callee, len;
struct laststmt_struct last = laststmt;
}
else if (TREE_CODE (len) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (len);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (len);
if (!is_gimple_assign (def_stmt)
|| gimple_assign_rhs_code (def_stmt) != PLUS_EXPR
|| gimple_assign_rhs1 (def_stmt) != last.len
{
int idx;
tree src;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
if (lhs == NULL_TREE)
{
int idx;
tree src;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
bool with_bounds = gimple_call_with_bounds_p (stmt);
int idx, didx;
tree src, dst, srclen, len, lhs, args, type, fn, oldlen;
bool success;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
strinfo si, dsi, olddsi, zsi;
location_t loc;
bool with_bounds = gimple_call_with_bounds_p (stmt);
{
int idx, didx;
tree src, dst, len, lhs, oldlen, newlen;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
strinfo si, dsi, olddsi;
bool with_bounds = gimple_call_with_bounds_p (stmt);
if (idx > 0)
{
- gimple def_stmt;
+ gimple *def_stmt;
/* Handle memcpy (x, y, l) where l is strlen (y) + 1. */
si = get_strinfo (idx);
int idx, didx;
tree src, dst, srclen, dstlen, len, lhs, args, type, fn, objsz, endptr;
bool success;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
strinfo si, dsi;
location_t loc;
bool with_bounds = gimple_call_with_bounds_p (stmt);
static void
handle_builtin_malloc (enum built_in_function bcode, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
gcc_assert (get_stridx (lhs) == 0);
int idx = new_stridx (lhs);
static bool
handle_builtin_memset (gimple_stmt_iterator *gsi)
{
- gimple stmt2 = gsi_stmt (*gsi);
+ gimple *stmt2 = gsi_stmt (*gsi);
if (!integer_zerop (gimple_call_arg (stmt2, 1)))
return true;
tree ptr = gimple_call_arg (stmt2, 0);
strinfo si1 = get_strinfo (idx1);
if (!si1)
return true;
- gimple stmt1 = si1->stmt;
+ gimple *stmt1 = si1->stmt;
if (!stmt1 || !is_gimple_call (stmt1))
return true;
tree callee1 = gimple_call_fndecl (stmt1);
unlink_stmt_vdef (stmt2);
if (lhs)
{
- gimple assign = gimple_build_assign (lhs, ptr);
+ gimple *assign = gimple_build_assign (lhs, ptr);
gsi_replace (gsi, assign, false);
}
else
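What this function looks for, sketched at the source level and assuming the usual malloc-plus-memset folding performed here (names invented):

/* p_1 = malloc (n_2);
   memset (p_1, 0, n_2);   <- zeroes the whole allocation

   is rewritten so the allocation does the zeroing:

   p_1 = calloc (n_2, 1);  <- memset dropped; its lhs, if any, is
                              replaced by a plain copy of p_1 as in
                              the hunk above.  */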
static void
handle_pointer_plus (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_assign_lhs (stmt), off;
int idx = get_stridx (gimple_assign_rhs1 (stmt));
strinfo si, zsi;
zsi = zero_length_string (lhs, si);
else if (TREE_CODE (off) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (off);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (off);
if (gimple_assign_single_p (def_stmt)
&& operand_equal_p (si->length, gimple_assign_rhs1 (def_stmt), 0))
zsi = zero_length_string (lhs, si);
{
int idx = -1;
strinfo si = NULL;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree ssaname = NULL_TREE, lhs = gimple_assign_lhs (stmt);
if (TREE_CODE (lhs) == MEM_REF
static bool
strlen_optimize_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_call (stmt))
{
been invalidated. */
static void
-do_invalidate (basic_block dombb, gimple phi, bitmap visited, int *count)
+do_invalidate (basic_block dombb, gimple *phi, bitmap visited, int *count)
{
unsigned int i, n = gimple_phi_num_args (phi);
for (i = 0; i < n; i++)
{
tree vuse = gimple_phi_arg_def (phi, i);
- gimple stmt = SSA_NAME_DEF_STMT (vuse);
+ gimple *stmt = SSA_NAME_DEF_STMT (vuse);
basic_block bb = gimple_bb (stmt);
if (bb == NULL
|| bb == dombb
/* A map mapping call statements to per-stmt variables for uses
and clobbers specific to the call. */
-static hash_map<gimple, varinfo_t> *call_stmt_vars;
+static hash_map<gimple *, varinfo_t> *call_stmt_vars;
/* Lookup or create the variable for the call statement CALL. */
when building alias sets and computing alias grouping heuristics. */
static void
-find_func_aliases (struct function *fn, gimple origt)
+find_func_aliases (struct function *fn, gimple *origt)
{
- gimple t = origt;
+ gimple *t = origt;
auto_vec<ce_s, 16> lhsc;
auto_vec<ce_s, 16> rhsc;
struct constraint_expr *c;
IPA constraint builder. */
static void
-find_func_clobbers (struct function *fn, gimple origt)
+find_func_clobbers (struct function *fn, gimple *origt)
{
- gimple t = origt;
+ gimple *t = origt;
auto_vec<ce_s, 16> lhsc;
auto_vec<ce_s, 16> rhsc;
varinfo_t fi;
constraints.create (8);
varmap.create (8);
vi_for_tree = new hash_map<tree, varinfo_t>;
- call_stmt_vars = new hash_map<gimple, varinfo_t>;
+ call_stmt_vars = new hash_map<gimple *, varinfo_t>;
memset (&stats, 0, sizeof (stats));
shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
find_func_aliases (cfun, stmt);
}
base zero. */
static bool
-visit_loadstore (gimple, tree base, tree ref, void *clique_)
+visit_loadstore (gimple *, tree base, tree ref, void *clique_)
{
unsigned short clique = (uintptr_t)clique_;
if (TREE_CODE (base) == MEM_REF
{
/* Now look at possible dereferences of ptr. */
imm_use_iterator ui;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
{
/* ??? Calls and asms. */
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
walk_stmt_load_store_ops (stmt, (void *)(uintptr_t)clique,
visit_loadstore, visit_loadstore);
}
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
find_func_aliases (func, stmt);
find_func_clobbers (func, stmt);
used SSA_NAMEs. */
static bool
-stmt_local_def (gimple stmt)
+stmt_local_def (gimple *stmt)
{
basic_block bb, def_bb;
imm_use_iterator iter;
static void
gsi_advance_fw_nondebug_nonlocal (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
while (true)
{
/* Update BB_DEP_BB, given the dependencies in STMT. */
static void
-stmt_update_dep_bb (gimple stmt)
+stmt_update_dep_bb (gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use;
unsigned int first = bitmap_first_set_bit (e->bbs);
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, first);
int size = 0;
- gimple stmt;
+ gimple *stmt;
tree arg;
unsigned int s;
bitmap_iterator bs;
{
unsigned int i, first1, first2;
gimple_stmt_iterator gsi1, gsi2;
- gimple s1, s2;
+ gimple *s1, *s2;
basic_block bb1, bb2;
if (e1->hashval != e2->hashval)
for (gimple_stmt_iterator i = gsi_last_bb (bb); !gsi_end_p (i);
gsi_prev_nondebug (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
if (gimple_vdef (stmt) == NULL_TREE)
continue;
gimple_bb (s2) are members of SAME_SUCC. */
static bool
-gimple_equal_p (same_succ same_succ, gimple s1, gimple s2)
+gimple_equal_p (same_succ same_succ, gimple *s1, gimple *s2)
{
unsigned int i;
tree lhs1, lhs2;
gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
bool *vuse_escaped)
{
- gimple stmt;
+ gimple *stmt;
tree lvuse;
while (true)
while (!gsi_end_p (gsi1) && !gsi_end_p (gsi2))
{
- gimple stmt1 = gsi_stmt (gsi1);
- gimple stmt2 = gsi_stmt (gsi2);
+ gimple *stmt1 = gsi_stmt (gsi1);
+ gimple *stmt2 = gsi_stmt (gsi2);
/* What could be better than this here is to blacklist the bb
containing the stmt, when encountering the stmt f.i. in
bb_has_non_vop_phi (basic_block bb)
{
gimple_seq phis = phi_nodes (bb);
- gimple phi;
+ gimple *phi;
if (phis == NULL)
return false;
defs. */
static void
-update_debug_stmt (gimple stmt)
+update_debug_stmt (gimple *stmt)
{
use_operand_p use_p;
ssa_op_iter oi;
FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, oi, SSA_OP_USE)
{
tree name = USE_FROM_PTR (use_p);
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
basic_block bbdef = gimple_bb (def_stmt);
if (bbdef == NULL || bbuse == bbdef
|| dominated_by_p (CDI_DOMINATORS, bbuse, bbdef))
EXECUTE_IF_SET_IN_BITMAP (update_bbs, 0, i, bi)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
bb = BASIC_BLOCK_FOR_FN (cfun, i);
is available. */
static inline bool
-ter_is_replaceable_p (gimple stmt)
+ter_is_replaceable_p (gimple *stmt)
{
if (ssa_is_replaceable_p (stmt))
{
use_operand_p use_p;
tree def;
- gimple use_stmt;
+ gimple *use_stmt;
location_t locus1, locus2;
tree block1, block2;
/* Create an expression entry for a replaceable expression. */
static void
-process_replaceable (temp_expr_table *tab, gimple stmt, int call_cnt)
+process_replaceable (temp_expr_table *tab, gimple *stmt, int call_cnt)
{
tree var, def, basevar;
int version;
walk_stmt_load_store_addr_ops. */
static bool
-find_ssaname_in_store (gimple, tree, tree t, void *data)
+find_ssaname_in_store (gimple *, tree, tree t, void *data)
{
return walk_tree (&t, find_ssaname, data, NULL) != NULL_TREE;
}
find_replaceable_in_bb (temp_expr_table *tab, basic_block bb)
{
gimple_stmt_iterator bsi;
- gimple stmt;
+ gimple *stmt;
tree def, use, fndecl;
int partition;
var_map map = tab->map;
assignments which we cannot expand correctly. */
if (gimple_vdef (stmt))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (use);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (use);
while (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == SSA_NAME)
def_stmt
/* Array to record value-handles per SSA_NAME. */
vec<tree> ssa_name_values;
-typedef tree (pfn_simplify) (gimple, gimple, class avail_exprs_stack *);
+typedef tree (pfn_simplify) (gimple *, gimple *, class avail_exprs_stack *);
/* Set the value for the SSA name NAME to VALUE. */
BB. If no such ASSERT_EXPR is found, return OP. */
static tree
-lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
+lhs_of_dominating_assert (tree op, basic_block bb, gimple *stmt)
{
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
use_operand_p use_p;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
May return NULL_TREE if no simplification is possible. */
static tree
-fold_assignment_stmt (gimple stmt)
+fold_assignment_stmt (gimple *stmt)
{
enum tree_code subcode = gimple_assign_rhs_code (stmt);
a context sensitive equivalence which may help us simplify
later statements in E->dest. */
-static gimple
+static gimple *
record_temporary_equivalences_from_stmts_at_dest (edge e,
const_and_copies *const_and_copies,
avail_exprs_stack *avail_exprs_stack,
pfn_simplify simplify,
bool backedge_seen)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
gimple_stmt_iterator gsi;
int max_stmt_count;
necessarily valid. We use this callback rather than the ones provided by
DOM/VRP to achieve that effect. */
static tree
-dummy_simplify (gimple stmt1 ATTRIBUTE_UNUSED, gimple stmt2 ATTRIBUTE_UNUSED,
+dummy_simplify (gimple *stmt1 ATTRIBUTE_UNUSED, gimple *stmt2 ATTRIBUTE_UNUSED,
class avail_exprs_stack *avail_exprs_stack ATTRIBUTE_UNUSED)
{
return NULL_TREE;
static tree
simplify_control_stmt_condition (edge e,
- gimple stmt,
+ gimple *stmt,
class avail_exprs_stack *avail_exprs_stack,
gcond *dummy_cond,
pfn_simplify simplify,
for (gimple_stmt_iterator si = gsi;
i * 4 <= alloc_count * 3 && !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!is_gimple_debug (stmt))
break;
i++;
duplicates in FEWVARS. */
for (gimple_stmt_iterator si = gsi; !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!is_gimple_debug (stmt))
break;
for (gimple_stmt_iterator si = gsi_last_bb (bb);
!gsi_end_p (si); gsi_prev (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!is_gimple_debug (stmt))
continue;
{
basic_block bb = taken_edge->dest;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
tree cond;
/* The key property of these blocks is that they need not be duplicated
bool seen_loop_phi)
{
tree var = SSA_NAME_VAR (expr);
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
basic_block var_bb = gimple_bb (def_stmt);
if (var == NULL || var_bb == NULL)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* Do not count empty statements and labels. */
if (gimple_code (stmt) != GIMPLE_NOP
&& gimple_code (stmt) != GIMPLE_LABEL
/* Now walk each statement recording any context sensitive
temporary equivalences we can detect. */
- gimple stmt
+ gimple *stmt
= record_temporary_equivalences_from_stmts_at_dest (e, const_and_copies,
avail_exprs_stack,
simplify,
bool handle_dominating_asserts,
class const_and_copies *const_and_copies,
class avail_exprs_stack *avail_exprs_stack,
- tree (*simplify) (gimple, gimple,
+ tree (*simplify) (gimple *, gimple *,
class avail_exprs_stack *))
{
bitmap visited = BITMAP_ALLOC (NULL);
extern void thread_across_edge (gcond *, edge, bool,
const_and_copies *,
avail_exprs_stack *,
- tree (*) (gimple, gimple, avail_exprs_stack *));
+ tree (*) (gimple *, gimple *,
+ avail_exprs_stack *));
#endif /* GCC_TREE_SSA_THREADEDGE_H */
static bool
bb_ends_with_multiway_branch (basic_block bb ATTRIBUTE_UNUSED)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
return true;
if (stmt && gimple_code (stmt) == GIMPLE_GOTO
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple stmt;
+ gimple *stmt;
/* If the block does not end with a COND_EXPR or SWITCH_EXPR
then there is nothing to do. */
/* Walk over the PHI nodes, unpropagating values. */
for (gsi = gsi_start (phis) ; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
tree arg = PHI_ARG_DEF (phi, e->dest_idx);
tree res = PHI_RESULT (phi);
warn_uninit (enum opt_code wc, tree t, tree expr, tree var,
const char *gmsgid, void *data, location_t phiarg_loc)
{
- gimple context = (gimple) data;
+ gimple *context = (gimple *) data;
location_t location, cfun_loc;
expanded_location xloc, floc;
single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)), bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
use_operand_p use_p;
ssa_op_iter op_iter;
tree use;
redundant. Can be enhanced to be more general. */
static bool
-can_skip_redundant_opnd (tree opnd, gimple phi)
+can_skip_redundant_opnd (tree opnd, gimple *phi)
{
- gimple op_def;
+ gimple *op_def;
tree phi_def;
int i, n;
pred_chain t_chain = vNULL;
for (j = 0; j < one_cd_chain.length (); j++)
{
- gimple cond_stmt;
+ gimple *cond_stmt;
gimple_stmt_iterator gsi;
basic_block guard_bb;
pred_info one_pred;
static void
collect_phi_def_edges (gphi *phi, basic_block cd_root,
vec<edge> *edges,
- hash_set<gimple> *visited_phis)
+ hash_set<gimple *> *visited_phis)
{
size_t i, n;
edge opnd_edge;
}
else
{
- gimple def = SSA_NAME_DEF_STMT (opnd);
+ gimple *def = SSA_NAME_DEF_STMT (opnd);
if (gimple_code (def) == GIMPLE_PHI
&& dominated_by_p (CDI_DOMINATORS,
if (!cd_root)
return false;
- hash_set<gimple> visited_phis;
+ hash_set<gimple *> visited_phis;
collect_phi_def_edges (phi, cd_root, &def_edges, &visited_phis);
n = def_edges.length ();
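The source-level situation all this predicate machinery exists for, in its classic invented form:

/* int x;                     <- no initializer
   if (flag_1) x_2 = 1;
   # x_3 = PHI <x_2, x_4(D)>  <- x_4(D) is the undefined argument

   if (flag_1) use (x_3);     <- guarded by the same predicate, so the
                                 maybe-uninitialized warning is
                                 suppressed; an unguarded use of x_3
                                 would keep it.  */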
/* Dumps the predicates (PREDS) for USESTMT. */
static void
-dump_predicates (gimple usestmt, pred_chain_union preds,
+dump_predicates (gimple *usestmt, pred_chain_union preds,
const char* msg)
{
size_t i, j;
/* Forward declaration. */
static bool
-is_use_properly_guarded (gimple use_stmt,
+is_use_properly_guarded (gimple *use_stmt,
basic_block use_bb,
gphi *phi,
unsigned uninit_opnds,
if (is_value_included_in (flag_arg, boundary_cst, cmp_code))
{
tree opnd;
- gimple opnd_def;
+ gimple *opnd_def;
/* Now we know that this undefined edge is not
pruned. If the operand is defined by another phi,
hash_set<gphi *> *visited_phis)
{
unsigned int i, n;
- gimple flag_def = 0;
+ gimple *flag_def = 0;
tree boundary_cst = 0;
enum tree_code cmp_code;
bool swap_cond = false;
if (!is_neq_zero_form_p (*a_pred))
continue;
- gimple def_stmt = SSA_NAME_DEF_STMT (a_pred->pred_lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (a_pred->pred_lhs);
if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
continue;
if (gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR)
size_t i, j, n;
bool simplified = false;
pred_chain_union s_preds = vNULL;
- gimple def_stmt;
+ gimple *def_stmt;
n = preds->length ();
for (i = 0; i < n; i++)
/* This function simplifies predicates in PREDS. */
static void
-simplify_preds (pred_chain_union *preds, gimple use_or_def, bool is_use)
+simplify_preds (pred_chain_union *preds, gimple *use_or_def, bool is_use)
{
size_t i, n;
bool changed = false;
CMP_ASSIGN with comparison rhs. */
static pred_info
-get_pred_info_from_cmp (gimple cmp_assign)
+get_pred_info_from_cmp (gimple *cmp_assign)
{
pred_info n_pred;
n_pred.pred_lhs = gimple_assign_rhs1 (cmp_assign);
will be updated to that value. */
static bool
-is_degenerated_phi (gimple phi, pred_info *pred_p)
+is_degenerated_phi (gimple *phi, pred_info *pred_p)
{
int i, n;
tree op0;
- gimple def0;
+ gimple *def0;
pred_info pred0;
n = gimple_phi_num_args (phi);
for (i = 1; i < n; ++i)
{
- gimple def;
+ gimple *def;
pred_info pred;
tree op = gimple_phi_arg_def (phi, i);
return;
}
- gimple def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
if (gimple_code (def_stmt) == GIMPLE_PHI
&& is_degenerated_phi (def_stmt, &pred))
return;
}
- gimple def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
if (gimple_code (def_stmt) == GIMPLE_ASSIGN)
and_or_code = gimple_assign_rhs_code (def_stmt);
if (and_or_code != BIT_IOR_EXPR
/* Normalize predicate chains PREDS and returns the normalized one. */
static pred_chain_union
-normalize_preds (pred_chain_union preds, gimple use_or_def, bool is_use)
+normalize_preds (pred_chain_union preds, gimple *use_or_def, bool is_use)
{
pred_chain_union norm_preds = vNULL;
size_t n = preds.length ();
VISITED_PHIS is a pointer set of phis being visited. */
static bool
-is_use_properly_guarded (gimple use_stmt,
+is_use_properly_guarded (gimple *use_stmt,
basic_block use_bb,
gphi *phi,
unsigned uninit_opnds,
function. ADDED_TO_WORKLIST is the pointer set tracking
if the new phi is already in the worklist. */
-static gimple
+static gimple *
find_uninit_use (gphi *phi, unsigned uninit_opnds,
vec<gphi *> *worklist,
hash_set<gphi *> *added_to_worklist)
{
tree phi_result;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
pred_chain_union def_preds = vNULL;
- gimple ret = NULL;
+ gimple *ret = NULL;
phi_result = gimple_phi_result (phi);
hash_set<gphi *> *added_to_worklist)
{
unsigned uninit_opnds;
- gimple uninit_use_stmt = 0;
+ gimple *uninit_use_stmt = 0;
tree uninit_op;
int phiarg_index;
location_t loc;
copying and removing. */
void
-gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
+gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
if (MAY_HAVE_DEBUG_STMTS)
{
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple stmt;
- gimple def_stmt = NULL;
+ gimple *stmt;
+ gimple *def_stmt = NULL;
int usecount = 0;
tree value = NULL;
void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
ssa_op_iter op_iter;
def_operand_p def_p;
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
void
-reset_debug_uses (gimple stmt)
+reset_debug_uses (gimple *stmt)
{
ssa_op_iter op_iter;
def_operand_p def_p;
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
if (!MAY_HAVE_DEBUG_STMTS)
return;
{
bool remove_now = true;
tree var = ssa_name (j);
- gimple stmt;
+ gimple *stmt;
imm_use_iterator uit;
FOR_EACH_IMM_USE_STMT (stmt, uit, var)
if (remove_now)
{
- gimple def = SSA_NAME_DEF_STMT (var);
+ gimple *def = SSA_NAME_DEF_STMT (var);
gimple_stmt_iterator gsi = gsi_for_stmt (def);
if (gimple_code (def) == GIMPLE_PHI)
static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
- gimple stmt, bool is_virtual)
+ gimple *stmt, bool is_virtual)
{
if (verify_ssa_name (ssa_name, is_virtual))
goto err;
static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
- gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
+ gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
bool err = false;
tree ssa_name = USE_FROM_PTR (use_p);
tree name = ssa_name (i);
if (name)
{
- gimple stmt;
+ gimple *stmt;
TREE_VISITED (name) = 0;
verify_ssa_name (name, virtual_operand_p (name));
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
use_operand_p use_p;
if (check_modified_stmt && gimple_modified_p (stmt))
bool
ssa_undefined_value_p (tree t, bool partial)
{
- gimple def_stmt;
+ gimple *def_stmt;
tree var = SSA_NAME_VAR (t);
if (!var)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
enum gimple_code code = gimple_code (stmt);
tree decl;
FOR_EACH_BB_FN (bb, cfun)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* Re-write TARGET_MEM_REFs of symbols we want to
rewrite into SSA form. */
? REALPART_EXPR : IMAGPART_EXPR,
TREE_TYPE (other),
TREE_OPERAND (lhs, 0));
- gimple load = gimple_build_assign (other, lrhs);
+ gimple *load = gimple_build_assign (other, lrhs);
location_t loc = gimple_location (stmt);
gimple_set_location (load, loc);
gimple_set_vuse (load, gimple_vuse (stmt));
extern void redirect_edge_var_map_destroy (void);
extern edge ssa_redirect_edge (edge, basic_block);
extern void flush_pending_stmts (edge);
-extern void gimple_replace_ssa_lhs (gimple, tree);
+extern void gimple_replace_ssa_lhs (gimple *, tree);
extern tree target_for_debug_bind (tree);
extern void insert_debug_temp_for_var_def (gimple_stmt_iterator *, tree);
extern void insert_debug_temps_for_defs (gimple_stmt_iterator *);
-extern void reset_debug_uses (gimple);
+extern void reset_debug_uses (gimple *);
extern void release_defs_bitset (bitmap toremove);
extern void verify_ssa (bool, bool);
extern void init_tree_ssa (struct function *);
used without a preceding definition). */
tree
-make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
+make_ssa_name_fn (struct function *fn, tree var, gimple *stmt)
{
tree t;
use_operand_p imm;
statement STMT in function FN. */
tree
-copy_ssa_name_fn (struct function *fn, tree name, gimple stmt)
+copy_ssa_name_fn (struct function *fn, tree name, gimple *stmt)
{
tree new_name;
in function FN. */
tree
-duplicate_ssa_name_fn (struct function *fn, tree name, gimple stmt)
+duplicate_ssa_name_fn (struct function *fn, tree name, gimple *stmt)
{
tree new_name = copy_ssa_name_fn (fn, name, stmt);
if (POINTER_TYPE_P (TREE_TYPE (name)))
/* Release all the SSA_NAMEs created by STMT. */
void
-release_defs (gimple stmt)
+release_defs (gimple *stmt)
{
tree def;
ssa_op_iter iter;
extern void init_ssanames (struct function *, int);
extern void fini_ssanames (void);
extern void ssanames_print_statistics (void);
-extern tree make_ssa_name_fn (struct function *, tree, gimple);
+extern tree make_ssa_name_fn (struct function *, tree, gimple *);
extern void release_ssa_name_fn (struct function *, tree);
extern bool get_ptr_info_alignment (struct ptr_info_def *, unsigned int *,
unsigned int *);
unsigned int);
extern struct ptr_info_def *get_ptr_info (tree);
-extern tree copy_ssa_name_fn (struct function *, tree, gimple);
+extern tree copy_ssa_name_fn (struct function *, tree, gimple *);
extern void duplicate_ssa_name_ptr_info (tree, struct ptr_info_def *);
-extern tree duplicate_ssa_name_fn (struct function *, tree, gimple);
+extern tree duplicate_ssa_name_fn (struct function *, tree, gimple *);
extern void duplicate_ssa_name_range_info (tree, enum value_range_type,
struct range_info_def *);
extern void reset_flow_sensitive_info (tree);
-extern void release_defs (gimple);
+extern void release_defs (gimple *);
extern void replace_ssa_name_symbol (tree, tree);
in function cfun. */
static inline tree
-make_ssa_name (tree var, gimple stmt = NULL)
+make_ssa_name (tree var, gimple *stmt = NULL)
{
return make_ssa_name_fn (cfun, var, stmt);
}
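The default argument survives the respelling unchanged: the parameter is still pointer-typed, so stmt = NULL remains valid with gimple *stmt. A standalone sketch of the inline-wrapper pattern under that assumption (stand-in types, invented helper name):

struct gimple { int code; };
typedef int *tree;

static tree
make_ssa_name_sketch (tree var, gimple *stmt = nullptr)
{
  (void) stmt;   /* default is still NULL; only the type spelling changed */
  return var;
}

int
main ()
{
  int storage = 0;
  tree var = &storage;
  return make_ssa_name_sketch (var) == var ? 0 : 1;
}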
statement STMT in function cfun. */
static inline tree
-copy_ssa_name (tree var, gimple stmt = NULL)
+copy_ssa_name (tree var, gimple *stmt = NULL)
{
return copy_ssa_name_fn (cfun, var, stmt);
}
in function cfun. */
static inline tree
-duplicate_ssa_name (tree var, gimple stmt)
+duplicate_ssa_name (tree var, gimple *stmt)
{
return duplicate_ssa_name_fn (cfun, var, stmt);
}
in function cfun. Arrange so that it uses NAME in dumps. */
static inline tree
-make_temp_ssa_name (tree type, gimple stmt, const char *name)
+make_temp_ssa_name (tree type, gimple *stmt, const char *name)
{
tree ssa_name;
gcc_checking_assert (TYPE_P (type));
bool gpr_p)
{
tree lhs, orig_lhs;
- gimple stmt;
+ gimple *stmt;
unsigned HOST_WIDE_INT ret = 0, val, counter_val;
unsigned int max_size;
for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree use;
ssa_op_iter iter;
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree callee, ap;
if (!is_gimple_call (stmt))
!gsi_end_p (i) && !va_list_escapes;
gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
/* Don't look at __builtin_va_{start,end}, they are ok. */
if (is_gimple_call (stmt))
/* Return true if STMT is IFN_VA_ARG. */
static bool
-gimple_call_ifn_va_arg_p (gimple stmt)
+gimple_call_ifn_va_arg_p (gimple *stmt)
{
return (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt)
FOR_EACH_BB_FN (bb, fun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree ap, expr, lhs, type;
gimple_seq pre = NULL, post = NULL;
/* The first load statement that loads a temporary from a new static array.
*/
- gimple arr_ref_first;
+ gimple *arr_ref_first;
/* The last load statement that loads a temporary from a new static array. */
- gimple arr_ref_last;
+ gimple *arr_ref_last;
/* A string describing why the case wasn't a good candidate, written to
the dump file, if there is one. */
gphi *phi, tree tidx, struct switch_conv_info *info)
{
tree name, cst;
- gimple load;
+ gimple *load;
gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
location_t loc = gimple_location (swtch);
{
tree arr_index_type;
tree tidx, sub, utype;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
gphi_iterator gpi;
int i;
FOR_EACH_BB_FN (bb, fun)
{
const char *failure_reason;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
{
if (dump_file)
containing the value of EXPR at GSI. */
static tree
-independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
+independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi)
{
basic_block bb, call_bb, at_bb;
edge e;
find_tail_calls (basic_block bb, struct tailcall **ret)
{
tree ass_var = NULL_TREE, ret_var, func, param;
- gimple stmt;
+ gimple *stmt;
gcall *call = NULL;
gimple_stmt_iterator gsi, agsi;
bool tail_recursion;
eliminate_tail_call (struct tailcall *t)
{
tree param, rslt;
- gimple stmt, call;
+ gimple *stmt, *call;
tree arg;
size_t idx;
basic_block bb, first;
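Declarations with several declarators need a * on each one: gimple stmt, call; declared two pointers under the old typedef, but gimple *stmt, call; would declare one pointer and one object. A tiny standalone illustration:

struct gimple { int code; };

int
main ()
{
  gimple g = { 0 };
  /* Each declarator carries its own '*', which is why the patch
     writes 'gimple *stmt, *call;' rather than 'gimple *stmt, call;'.  */
  gimple *stmt = &g, *call = &g;
  return stmt == call ? 0 : 1;
}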
gphi *phi;
gphi_iterator gpi;
gimple_stmt_iterator gsi;
- gimple orig_stmt;
+ gimple *orig_stmt;
stmt = orig_stmt = gsi_stmt (t->call_gsi);
bb = gsi_bb (t->call_gsi);
gsi_next (&gsi);
while (!gsi_end_p (gsi))
{
- gimple t = gsi_stmt (gsi);
+ gimple *t = gsi_stmt (gsi);
/* Do not remove the return statement, so that redirect_edge_and_branch
sees how the block ends. */
if (gimple_code (t) == GIMPLE_RETURN)
bool changed = false;
basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
tree param;
- gimple stmt;
+ gimple *stmt;
edge_iterator ei;
if (!suitable_for_tail_opt_p ())
types. */
tree
-vect_get_smallest_scalar_type (gimple stmt, HOST_WIDE_INT *lhs_size_unit,
+vect_get_smallest_scalar_type (gimple *stmt, HOST_WIDE_INT *lhs_size_unit,
HOST_WIDE_INT *rhs_size_unit)
{
tree scalar_type = gimple_expr_type (stmt);
if (STMT_VINFO_GROUPED_ACCESS (stmtinfo_a)
|| STMT_VINFO_GROUPED_ACCESS (stmtinfo_b))
{
- gimple earlier_stmt;
+ gimple *earlier_stmt;
earlier_stmt = get_earlier_stmt (DR_STMT (dra), DR_STMT (drb));
if (DR_IS_WRITE
(STMT_VINFO_DATA_REF (vinfo_for_stmt (earlier_stmt))))
corresponding scalar load, and a vector store can only be after its
corresponding scalar store. So the order of the accesses is preserved in
case the load is before the store. */
- gimple earlier_stmt = get_earlier_stmt (DR_STMT (dra), DR_STMT (drb));
+ gimple *earlier_stmt = get_earlier_stmt (DR_STMT (dra), DR_STMT (drb));
if (DR_IS_READ (STMT_VINFO_DATA_REF (vinfo_for_stmt (earlier_stmt))))
{
/* That only holds for load-store pairs taking part in vectorization. */
static bool
vect_compute_data_ref_alignment (struct data_reference *dr)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = NULL;
FOR_EACH_VEC_ELT (datarefs, i, dr)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!STMT_VINFO_RELEVANT_P (stmt_info))
static bool
vector_alignment_reachable_p (struct data_reference *dr)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
unsigned int *outside_cost,
stmt_vector_for_cost *body_cost_vec)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
int nunits = TYPE_VECTOR_SUBPARTS (STMT_VINFO_VECTYPE (stmt_info));
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
vect_peel_info elem = *slot;
int save_misalignment, dummy;
unsigned int inside_cost = 0, outside_cost = 0, i;
- gimple stmt = DR_STMT (elem->dr);
+ gimple *stmt = DR_STMT (elem->dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
vec<data_reference_p> datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
bool do_peeling = false;
bool do_versioning = false;
bool stat;
- gimple stmt;
+ gimple *stmt;
stmt_vec_info stmt_info;
unsigned int npeel = 0;
bool all_misalignments_unknown = true;
unsigned max_peel = npeel;
if (max_peel == 0)
{
- gimple dr_stmt = DR_STMT (dr0);
+ gimple *dr_stmt = DR_STMT (dr0);
stmt_vec_info vinfo = vinfo_for_stmt (dr_stmt);
tree vtype = STMT_VINFO_VECTYPE (vinfo);
max_peel = TYPE_VECTOR_SUBPARTS (vtype) - 1;
if (!supportable_dr_alignment)
{
- gimple stmt;
+ gimple *stmt;
int mask;
tree vectype;
if (do_versioning)
{
- vec<gimple> may_misalign_stmts
+ vec<gimple *> may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
- gimple stmt;
+ gimple *stmt;
/* It can now be assumed that the data references in the statements
in LOOP_VINFO_MAY_MISALIGN_STMTS will be aligned in the version
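vec<gimple> only made sense while gimple was a pointer; instantiating the container over the statement class by value would be wrong, so the element type becomes gimple *. A std::vector analogue of the template-argument change (GCC's vec API differs; the point carries over):

#include <vector>

struct gimple { int code; };

int
main ()
{
  /* Statements are always held by pointer; the template argument now
     has to say so explicitly.  */
  std::vector<gimple *> may_misalign_stmts;
  gimple stmt = { 0 };
  may_misalign_stmts.push_back (&stmt);
  return may_misalign_stmts.front () == &stmt ? 0 : 1;
}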
tree step = DR_STEP (dr);
tree scalar_type = TREE_TYPE (DR_REF (dr));
HOST_WIDE_INT type_size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type));
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) == stmt)
{
/* First stmt in the interleaving chain. Check the chain. */
- gimple next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
+ gimple *next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
struct data_reference *data_ref = dr;
unsigned int count = 1;
tree prev_init = DR_INIT (data_ref);
- gimple prev = stmt;
+ gimple *prev = stmt;
HOST_WIDE_INT diff, gaps = 0;
while (next)
if (!vect_analyze_group_access_1 (dr))
{
/* Dissolve the group if present. */
- gimple next, stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (dr)));
+ gimple *next;
+ gimple *stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (dr)));
while (stmt)
{
stmt_vec_info vinfo = vinfo_for_stmt (stmt);
{
tree step = DR_STEP (dr);
tree scalar_type = TREE_TYPE (DR_REF (dr));
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = NULL;
FOR_EACH_VEC_ELT (may_alias_ddrs, i, ddr)
{
struct data_reference *dr_a, *dr_b;
- gimple dr_group_first_a, dr_group_first_b;
+ gimple *dr_group_first_a, *dr_group_first_b;
tree segment_length_a, segment_length_b;
- gimple stmt_a, stmt_b;
+ gimple *stmt_a, *stmt_b;
dr_a = DDR_A (ddr);
stmt_a = DR_STMT (DDR_A (ddr));
or scatter store and if so, return a builtin decl for that operation. */
tree
-vect_check_gather_scatter (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
+vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo, tree *basep,
tree *offp, int *scalep)
{
HOST_WIDE_INT scale = 1, pbitpos, pbitsize;
&& integer_zerop (TREE_OPERAND (base, 1))
&& !expr_invariant_in_loop_p (loop, TREE_OPERAND (base, 0)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
+ gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
if (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
base = TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0);
if (TREE_CODE (off) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (off);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (off);
if (expr_invariant_in_loop_p (loop, off))
return NULL_TREE;
for (gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
++*n_stmts;
bb = BB_VINFO_BB (bb_vinfo);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
++*n_stmts;
FOR_EACH_VEC_ELT (datarefs, i, dr)
{
- gimple stmt;
+ gimple *stmt;
stmt_vec_info stmt_info;
tree base, offset, init;
enum { SG_NONE, GATHER, SCATTER } gatherscatter = SG_NONE;
off = TREE_OPERAND (off, 0);
if (TREE_CODE (off) == SSA_NAME)
{
- gimple def = SSA_NAME_DEF_STMT (off);
+ gimple *def = SSA_NAME_DEF_STMT (off);
tree reft = TREE_TYPE (DR_REF (newdr));
if (is_gimple_call (def)
&& gimple_call_internal_p (def)
FORNOW: We are only handling array accesses with step 1. */
tree
-vect_create_addr_base_for_vector_ref (gimple stmt,
+vect_create_addr_base_for_vector_ref (gimple *stmt,
gimple_seq *new_stmt_list,
tree offset,
struct loop *loop,
4. Return the pointer. */
tree
-vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
+vect_create_data_ref_ptr (gimple *stmt, tree aggr_type, struct loop *at_loop,
tree offset, tree *initial_address,
- gimple_stmt_iterator *gsi, gimple *ptr_incr,
+ gimple_stmt_iterator *gsi, gimple **ptr_incr,
bool only_init, bool *inv_p, tree byte_offset)
{
const char *base_name;
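ptr_incr is an out-parameter through which vect_create_data_ref_ptr hands back the increment statement, so it gains a level of indirection: the old gimple * (a pointer to the pointer typedef) becomes gimple **. A sketch of the calling convention with stand-in types:

struct gimple { int code; };

/* The callee stores a statement pointer through PTR_INCR, so it needs
   the address of the caller's 'gimple *'.  */
static void
create_ptr_sketch (gimple **ptr_incr, gimple *incr)
{
  *ptr_incr = incr;
}

int
main ()
{
  gimple incr = { 0 };
  gimple *my_incr = nullptr;
  create_ptr_sketch (&my_incr, &incr);   /* caller passes &pointer */
  return my_incr == &incr ? 0 : 1;
}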
gimple_stmt_iterator incr_gsi;
bool insert_after;
tree indx_before_incr, indx_after_incr;
- gimple incr;
+ gimple *incr;
tree step;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
/* Likewise for any of the data references in the stmt group. */
else if (STMT_VINFO_GROUP_SIZE (stmt_info) > 1)
{
- gimple orig_stmt = STMT_VINFO_GROUP_FIRST_ELEMENT (stmt_info);
+ gimple *orig_stmt = STMT_VINFO_GROUP_FIRST_ELEMENT (stmt_info);
do
{
stmt_vec_info sinfo = vinfo_for_stmt (orig_stmt);
*/
tree
-bump_vector_ptr (tree dataref_ptr, gimple ptr_incr, gimple_stmt_iterator *gsi,
- gimple stmt, tree bump)
+bump_vector_ptr (tree dataref_ptr, gimple *ptr_incr, gimple_stmt_iterator *gsi,
+ gimple *stmt, tree bump)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
void
vect_permute_store_chain (vec<tree> dr_chain,
unsigned int length,
- gimple stmt,
+ gimple *stmt,
gimple_stmt_iterator *gsi,
vec<tree> *result_chain)
{
tree vect1, vect2, high, low;
- gimple perm_stmt;
+ gimple *perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
tree perm_mask_low, perm_mask_high;
tree data_ref;
Return value - the result of the loop-header phi node. */
tree
-vect_setup_realignment (gimple stmt, gimple_stmt_iterator *gsi,
+vect_setup_realignment (gimple *stmt, gimple_stmt_iterator *gsi,
tree *realignment_token,
enum dr_alignment_support alignment_support_scheme,
tree init_addr,
edge pe = NULL;
tree scalar_dest = gimple_assign_lhs (stmt);
tree vec_dest;
- gimple inc;
+ gimple *inc;
tree ptr;
tree data_ref;
basic_block new_bb;
static void
vect_permute_load_chain (vec<tree> dr_chain,
unsigned int length,
- gimple stmt,
+ gimple *stmt,
gimple_stmt_iterator *gsi,
vec<tree> *result_chain)
{
tree data_ref, first_vect, second_vect;
tree perm_mask_even, perm_mask_odd;
tree perm3_mask_low, perm3_mask_high;
- gimple perm_stmt;
+ gimple *perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
unsigned int i, j, log_length = exact_log2 (length);
unsigned nelt = TYPE_VECTOR_SUBPARTS (vectype);
static bool
vect_shift_permute_load_chain (vec<tree> dr_chain,
unsigned int length,
- gimple stmt,
+ gimple *stmt,
gimple_stmt_iterator *gsi,
vec<tree> *result_chain)
{
tree vect[3], vect_shift[3], data_ref, first_vect, second_vect;
tree perm2_mask1, perm2_mask2, perm3_mask;
tree select_mask, shift1_mask, shift2_mask, shift3_mask, shift4_mask;
- gimple perm_stmt;
+ gimple *perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
unsigned int i;
*/
void
-vect_transform_grouped_load (gimple stmt, vec<tree> dr_chain, int size,
+vect_transform_grouped_load (gimple *stmt, vec<tree> dr_chain, int size,
gimple_stmt_iterator *gsi)
{
machine_mode mode;
for each vector to the associated scalar statement. */
void
-vect_record_grouped_load_vectors (gimple stmt, vec<tree> result_chain)
+vect_record_grouped_load_vectors (gimple *stmt, vec<tree> result_chain)
{
- gimple first_stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt));
- gimple next_stmt, new_stmt;
+ gimple *first_stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt));
+ gimple *next_stmt, *new_stmt;
unsigned int i, gap_count;
tree tmp_data_ref;
{
if (!GROUP_SAME_DR_STMT (vinfo_for_stmt (next_stmt)))
{
- gimple prev_stmt =
+ gimple *prev_stmt =
STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt));
- gimple rel_stmt =
+ gimple *rel_stmt =
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (prev_stmt));
while (rel_stmt)
{
vect_supportable_dr_alignment (struct data_reference *dr,
bool check_aligned_accesses)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
machine_mode mode = TYPE_MODE (vectype);
&& expand_vec_cond_expr_p (type, type))
{
tree zero, cst, cond;
- gimple stmt;
+ gimple *stmt;
zero = build_zero_cst (type);
cond = build2 (LT_EXPR, type, op0, zero);
bool all_same = true;
constructor_elt *elt;
tree *cst;
- gimple g;
+ gimple *g;
tree base = NULL_TREE;
optab op;
vector_element (gimple_stmt_iterator *gsi, tree vect, tree idx, tree *ptmpvec)
{
tree vect_type, vect_elt_type;
- gimple asgn;
+ gimple *asgn;
tree tmpvec;
tree arraytype;
bool need_asgn = true;
simplification by looking through intermediate vector results. */
if (TREE_CODE (vect) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vect);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vect);
if (is_gimple_assign (def_stmt)
&& (gimple_assign_rhs_code (def_stmt) == VECTOR_CST
|| gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR))
if (TREE_CODE (mask) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (mask);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (mask);
if (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == VECTOR_CST)
mask = gimple_assign_rhs1 (def_stmt);
if (VECTOR_INTEGER_TYPE_P (TREE_TYPE (rhs2)))
{
tree first;
- gimple def_stmt;
+ gimple *def_stmt;
if ((TREE_CODE (rhs2) == VECTOR_CST
&& (first = uniform_vector_p (rhs2)) != NULL_TREE)
static void
rename_variables_in_bb (basic_block bb, bool rename_from_outer_loop)
{
- gimple stmt;
+ gimple *stmt;
use_operand_p use_p;
ssa_op_iter iter;
edge e;
tree orig_def = ai->from;
tree new_def = ai->to;
imm_use_iterator imm_iter;
- gimple stmt;
+ gimple *stmt;
basic_block bbdef = gimple_bb (SSA_NAME_DEF_STMT (orig_def));
gcc_assert (dom_info_available_p (CDI_DOMINATORS));
transformations. */
static void
-adjust_phi_and_debug_stmts (gimple update_phi, edge e, tree new_def)
+adjust_phi_and_debug_stmts (gimple *update_phi, edge e, tree new_def)
{
tree orig_def = PHI_ARG_DEF_FROM_EDGE (update_phi, e);
set this earlier. Verify the PHI has the same value. */
if (new_name)
{
- gimple phi = SSA_NAME_DEF_STMT (new_name);
+ gimple *phi = SSA_NAME_DEF_STMT (new_name);
gcc_assert (gimple_code (phi) == GIMPLE_PHI
&& gimple_bb (phi) == *new_exit_bb
&& (PHI_ARG_DEF_FROM_EDGE (phi, single_exit (loop))
!gsi_end_p (gsi_from) && !gsi_end_p (gsi_to);
gsi_next (&gsi_from), gsi_next (&gsi_to))
{
- gimple from_phi = gsi_stmt (gsi_from);
- gimple to_phi = gsi_stmt (gsi_to);
+ gimple *from_phi = gsi_stmt (gsi_from);
+ gimple *to_phi = gsi_stmt (gsi_to);
tree from_arg = PHI_ARG_DEF_FROM_EDGE (from_phi, from);
tree to_arg = PHI_ARG_DEF_FROM_EDGE (to_phi, to);
if (TREE_CODE (from_arg) == SSA_NAME
gphi *new_phi = create_phi_node (new_vop, exit_e->dest);
tree vop = PHI_ARG_DEF_FROM_EDGE (phi, EDGE_SUCC (loop->latch, 0));
imm_use_iterator imm_iter;
- gimple stmt;
+ gimple *stmt;
use_operand_p use_p;
add_phi_arg (new_phi, vop, exit_e, UNKNOWN_LOCATION);
source_location
find_loop_location (struct loop *loop)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
basic_block bb;
gimple_stmt_iterator si;
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block bb = loop->header;
- gimple phi;
+ gimple *phi;
gphi_iterator gsi;
/* Analyze phi functions of the loop header. */
tree iters, iters_name;
edge pe;
basic_block new_bb;
- gimple dr_stmt = DR_STMT (dr);
+ gimple *dr_stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
int vectype_align = TYPE_ALIGN (vectype) / BITS_PER_UNIT;
gimple_seq *cond_expr_stmt_list)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
- vec<gimple> may_misalign_stmts
+ vec<gimple *> may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
- gimple ref_stmt;
+ gimple *ref_stmt;
int mask = LOOP_VINFO_PTR_MASK (loop_vinfo);
tree mask_cst;
unsigned int i;
char tmp_name[20];
tree or_tmp_name = NULL_TREE;
tree and_tmp_name;
- gimple and_stmt;
+ gimple *and_stmt;
tree ptrsize_zero;
tree part_cond_expr;
tree addr_base;
tree addr_tmp_name;
tree new_or_tmp_name;
- gimple addr_stmt, or_stmt;
+ gimple *addr_stmt, *or_stmt;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (ref_stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
bool negative = tree_int_cst_compare
stmt_vec_info stmt_info;
int i;
HOST_WIDE_INT dummy;
- gimple stmt, pattern_stmt = NULL;
+ gimple *stmt, *pattern_stmt = NULL;
gimple_seq pattern_def_seq = NULL;
gimple_stmt_iterator pattern_def_si = gsi_none ();
bool analyze_pattern_stmt = false;
gsi_next (&pattern_def_si);
if (pattern_def_seq != NULL)
{
- gimple pattern_def_stmt = NULL;
+ gimple *pattern_def_stmt = NULL;
stmt_vec_info pattern_def_stmt_info = NULL;
while (!gsi_end_p (pattern_def_si))
{
basic_block bb = loop->header;
tree init, step;
- auto_vec<gimple, 64> worklist;
+ auto_vec<gimple *, 64> worklist;
gphi_iterator gsi;
bool double_reduc;
/* Second - identify all reductions and nested cycles. */
while (worklist.length () > 0)
{
- gimple phi = worklist.pop ();
+ gimple *phi = worklist.pop ();
tree def = PHI_RESULT (phi);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
- gimple reduc_stmt;
+ gimple *reduc_stmt;
bool nested_cycle;
if (dump_enabled_p ())
/* Transfer group and reduction information from STMT to its pattern stmt. */
static void
-vect_fixup_reduc_chain (gimple stmt)
+vect_fixup_reduc_chain (gimple *stmt)
{
- gimple firstp = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt));
- gimple stmtp;
+ gimple *firstp = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt));
+ gimple *stmtp;
gcc_assert (!GROUP_FIRST_ELEMENT (vinfo_for_stmt (firstp))
&& GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)));
GROUP_SIZE (vinfo_for_stmt (firstp)) = GROUP_SIZE (vinfo_for_stmt (stmt));
static void
vect_fixup_scalar_cycles_with_patterns (loop_vec_info loop_vinfo)
{
- gimple first;
+ gimple *first;
unsigned i;
FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo), i, first)
gcc_assert (loop->inner && bb->loop_father == loop->inner);
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gimple *phi = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (phi);
loop_vec_info inner_loop_vinfo =
STMT_VINFO_LOOP_VINFO (stmt_info);
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info inner_loop_vinfo =
STMT_VINFO_LOOP_VINFO (stmt_info);
/* bb in current nest. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gimple *phi = gsi_stmt (si);
gimple_set_uid (phi, 0);
set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res, NULL));
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
gimple_set_uid (stmt, 0);
set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res, NULL));
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); )
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
/* We may have broken canonical form by moving a constant
into RHS1 of a commutative op. Fix such occurrences. */
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!is_gimple_assign (stmt) && !is_gimple_call (stmt))
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (STMT_VINFO_IN_PATTERN_P (stmt_info)
&& STMT_VINFO_RELATED_STMT (stmt_info))
if (STMT_VINFO_RELEVANT_P (stmt_info))
{
tree phi_op;
- gimple op_def_stmt;
+ gimple *op_def_stmt;
if (gimple_phi_num_args (phi) != 1)
return false;
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!gimple_clobber_p (stmt)
&& !vect_analyze_stmt (stmt, &need_to_vectorize, NULL))
return false;
STMT is printed with a message MSG. */
static void
-report_vect_op (int msg_type, gimple stmt, const char *msg)
+report_vect_op (int msg_type, gimple *stmt, const char *msg)
{
dump_printf_loc (msg_type, vect_location, "%s", msg);
dump_gimple_stmt (msg_type, TDF_SLIM, stmt, 0);
Return TRUE if a reduction chain was detected. */
static bool
-vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
+vect_is_slp_reduction (loop_vec_info loop_info, gimple *phi,
+ gimple *first_stmt)
{
struct loop *loop = (gimple_bb (phi))->loop_father;
struct loop *vect_loop = LOOP_VINFO_LOOP (loop_info);
enum tree_code code;
- gimple current_stmt = NULL, loop_use_stmt = NULL, first, next_stmt;
+ gimple *current_stmt = NULL, *loop_use_stmt = NULL, *first, *next_stmt;
stmt_vec_info use_stmt_info, current_stmt_info;
tree lhs;
imm_use_iterator imm_iter;
n_out_of_loop_uses = 0;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (gimple_assign_rhs2 (next_stmt) == lhs)
{
tree op = gimple_assign_rhs1 (next_stmt);
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
if (TREE_CODE (op) == SSA_NAME)
def_stmt = SSA_NAME_DEF_STMT (op);
else
{
tree op = gimple_assign_rhs2 (next_stmt);
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
if (TREE_CODE (op) == SSA_NAME)
def_stmt = SSA_NAME_DEF_STMT (op);
"res -= RHS" into "rhs += -RHS" when it seems worthwhile.
*/
-static gimple
-vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
+static gimple *
+vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple *phi,
bool check_reduction, bool *double_reduc,
bool modify, bool need_wrapping_integral_overflow)
{
struct loop *vect_loop = LOOP_VINFO_LOOP (loop_info);
edge latch_e = loop_latch_edge (loop);
tree loop_arg = PHI_ARG_DEF_FROM_EDGE (phi, latch_e);
- gimple def_stmt, def1 = NULL, def2 = NULL;
+ gimple *def_stmt, *def1 = NULL, *def2 = NULL;
enum tree_code orig_code, code;
tree op1, op2, op3 = NULL_TREE, op4 = NULL_TREE;
tree type;
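Return types are respelled the same way: a function declared static gimple now returns gimple *, and returning NULL stays valid. A simplified standalone sketch of the shape of this signature change (the logic here is invented for illustration):

struct gimple { int code; };

/* Mirrors the 'static gimple' -> 'static gimple *' change above:
   return the statement or NULL.  */
static gimple *
find_reduction_sketch (gimple *phi, bool check_reduction)
{
  return check_reduction ? phi : nullptr;
}

int
main ()
{
  gimple phi = { 0 };
  return find_reduction_sketch (&phi, true) == &phi ? 0 : 1;
}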
nloop_uses = 0;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
nloop_uses = 0;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (flow_bb_inside_loop_p (loop, gimple_bb (use_stmt)))
{
tree rhs = gimple_assign_rhs2 (def_stmt);
tree negrhs = make_ssa_name (TREE_TYPE (rhs));
- gimple negate_stmt = gimple_build_assign (negrhs, NEGATE_EXPR, rhs);
+ gimple *negate_stmt = gimple_build_assign (negrhs, NEGATE_EXPR, rhs);
gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
set_vinfo_for_stmt (negate_stmt, new_stmt_vec_info (negate_stmt,
loop_info, NULL));
/* Wrapper around vect_is_simple_reduction_1 that won't modify code
in-place. Arguments as there. */
-static gimple
-vect_is_simple_reduction (loop_vec_info loop_info, gimple phi,
+static gimple *
+vect_is_simple_reduction (loop_vec_info loop_info, gimple *phi,
bool check_reduction, bool *double_reduc,
bool need_wrapping_integral_overflow)
{
in-place if it enables detection of more reductions. Arguments
as there. */
-gimple
-vect_force_simple_reduction (loop_vec_info loop_info, gimple phi,
+gimple *
+vect_force_simple_reduction (loop_vec_info loop_info, gimple *phi,
bool check_reduction, bool *double_reduc,
bool need_wrapping_integral_overflow)
{
/* Return the reduction operand (with index REDUC_INDEX) of STMT. */
static tree
-get_reduction_op (gimple stmt, int reduc_index)
+get_reduction_op (gimple *stmt, int reduc_index)
{
switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
{
enum tree_code code;
optab optab;
tree vectype;
- gimple stmt, orig_stmt;
+ gimple *stmt, *orig_stmt;
tree reduction_op;
machine_mode mode;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
[X, X + S, X + 2*S, X + 3*S]. */
static tree
-get_initial_def_for_induction (gimple iv_phi)
+get_initial_def_for_induction (gimple *iv_phi)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (iv_phi);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
tree new_vec, vec_init, vec_step, t;
tree new_var;
tree new_name;
- gimple init_stmt, new_stmt;
+ gimple *init_stmt, *new_stmt;
gphi *induction_phi;
tree induc_def, vec_def, vec_dest;
tree init_expr, step_expr;
gimple_seq stmts = NULL;
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple exit_phi;
+ gimple *exit_phi;
edge latch_e;
tree loop_arg;
gimple_stmt_iterator si;
exit_phi = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, loop_arg)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
A cost model should help decide between these two schemes. */
tree
-get_initial_def_for_reduction (gimple stmt, tree init_val,
+get_initial_def_for_reduction (gimple *stmt, tree init_val,
tree *adjustment_def)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
tree init_value;
REAL_VALUE_TYPE real_init_val = dconst0;
int int_init_val = 0;
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
gcc_assert (vectype);
nunits = TYPE_VECTOR_SUBPARTS (vectype);
*/
static void
-vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
+vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
int ncopies, enum tree_code reduc_code,
- vec<gimple> reduction_phis,
+ vec<gimple *> reduction_phis,
int reduc_index, bool double_reduc,
slp_tree slp_node)
{
basic_block exit_bb;
tree scalar_dest;
tree scalar_type;
- gimple new_phi = NULL, phi;
+ gimple *new_phi = NULL, *phi;
gimple_stmt_iterator exit_gsi;
tree vec_dest;
tree new_temp = NULL_TREE, new_dest, new_name, new_scalar_dest;
- gimple epilog_stmt = NULL;
+ gimple *epilog_stmt = NULL;
enum tree_code code = gimple_assign_rhs_code (stmt);
- gimple exit_phi;
+ gimple *exit_phi;
tree bitsize;
tree adjustment_def = NULL;
tree vec_initial_def = NULL;
tree orig_name, scalar_result;
imm_use_iterator imm_iter, phi_imm_iter;
use_operand_p use_p, phi_use_p;
- gimple use_stmt, orig_stmt, reduction_phi = NULL;
+ gimple *use_stmt, *orig_stmt, *reduction_phi = NULL;
bool nested_in_vect_loop = false;
- auto_vec<gimple> new_phis;
- auto_vec<gimple> inner_phis;
+ auto_vec<gimple *> new_phis;
+ auto_vec<gimple *> inner_phis;
enum vect_def_type dt = vect_unknown_def_type;
int j, i;
auto_vec<tree> scalar_results;
unsigned int group_size = 1, k, ratio;
auto_vec<tree> vec_initial_defs;
- auto_vec<gimple> phis;
+ auto_vec<gimple *> phis;
bool slp_reduc = false;
tree new_phi_result;
- gimple inner_phi = NULL;
+ gimple *inner_phi = NULL;
if (slp_node)
group_size = SLP_TREE_SCALAR_STMTS (slp_node).length ();
vec_dest = vect_create_destination_var (scalar_dest, vectype);
for (k = 1; k < new_phis.length (); k++)
{
- gimple next_phi = new_phis[k];
+ gimple *next_phi = new_phis[k];
tree second_vect = PHI_RESULT (next_phi);
tmp = build2 (code, vectype, first_vect, second_vect);
if (slp_reduc)
{
tree res, first_res, new_res;
- gimple new_stmt;
+ gimple *new_stmt;
/* Reduce multiple scalar results in case of SLP unrolling. */
for (j = group_size; scalar_results.iterate (j, &res);
exit phi node. */
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
- gimple dest_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1];
+ gimple *dest_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1];
/* Handle reduction patterns. */
if (STMT_VINFO_RELATED_STMT (vinfo_for_stmt (dest_stmt)))
dest_stmt = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (dest_stmt));
if (slp_reduc)
{
- gimple current_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[k];
+ gimple *current_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[k];
orig_stmt = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (current_stmt));
/* SLP statements can't participate in patterns. */
stmt_vec_info new_phi_vinfo;
tree vect_phi_init, preheader_arg, vect_phi_res, init_def;
basic_block bb = gimple_bb (use_stmt);
- gimple use;
+ gimple *use;
/* Check that USE_STMT is really a double reduction phi
node. */
does *NOT* necessarily hold for reduction patterns. */
bool
-vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
optab optab, reduc_optab;
tree new_temp = NULL_TREE;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
gphi *new_phi = NULL;
tree scalar_type;
bool is_simple_use;
- gimple orig_stmt;
+ gimple *orig_stmt;
stmt_vec_info orig_stmt_info;
tree expr = NULL_TREE;
int i;
stmt_vec_info prev_stmt_info, prev_phi_info;
bool single_defuse_cycle = false;
tree reduc_def = NULL_TREE;
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
int j;
tree ops[3];
bool nested_cycle = false, found_nested_cycle_def = false;
- gimple reduc_def_stmt = NULL;
+ gimple *reduc_def_stmt = NULL;
bool double_reduc = false, dummy;
basic_block def_bb;
struct loop * def_stmt_loop, *outer_loop = NULL;
tree def_arg;
- gimple def_arg_stmt;
+ gimple *def_arg_stmt;
auto_vec<tree> vec_oprnds0;
auto_vec<tree> vec_oprnds1;
auto_vec<tree> vect_defs;
- auto_vec<gimple> phis;
+ auto_vec<gimple *> phis;
int vec_num;
tree def0, def1, tem, op0, op1 = NULL_TREE;
bool first_p = true;
return false;
}
- gimple tmp = vect_is_simple_reduction (loop_vinfo, reduc_def_stmt,
- !nested_cycle, &dummy, false);
+ gimple *tmp = vect_is_simple_reduction (loop_vinfo, reduc_def_stmt,
+ !nested_cycle, &dummy, false);
if (orig_stmt)
gcc_assert (tmp == orig_stmt
|| GROUP_FIRST_ELEMENT (vinfo_for_stmt (tmp)) == orig_stmt);
if (!slp_node)
{
enum vect_def_type dt;
- gimple dummy_stmt;
+ gimple *dummy_stmt;
tree dummy;
vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo, NULL,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
bool
-vectorizable_induction (gimple phi, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
- gimple *vec_stmt)
+vectorizable_induction (gimple *phi,
+ gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
+ gimple **vec_stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (phi);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple exit_phi;
+ gimple *exit_phi;
edge latch_e;
tree loop_arg;
loop_arg = PHI_ARG_DEF_FROM_EDGE (phi, latch_e);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, loop_arg)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
it can be supported. */
bool
-vectorizable_live_operation (gimple stmt,
+vectorizable_live_operation (gimple *stmt,
gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
- gimple *vec_stmt)
+ gimple **vec_stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
int op_type;
tree op;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
enum tree_code code;
enum gimple_rhs_class rhs_class;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (gimple_code (use_stmt) == GIMPLE_PHI
&& gimple_bb (use_stmt) == merge_bb)
{
/* Kill any debug uses outside LOOP of SSA names defined in STMT. */
static void
-vect_loop_kill_debug_uses (struct loop *loop, gimple stmt)
+vect_loop_kill_debug_uses (struct loop *loop, gimple *stmt)
{
ssa_op_iter op_iter;
imm_use_iterator imm_iter;
def_operand_p def_p;
- gimple ustmt;
+ gimple *ustmt;
FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
{
if (!is_gimple_val (ni_minus_gap_name))
{
var = create_tmp_var (TREE_TYPE (ni_name), "ni_gap");
- gimple stmts = NULL;
+ gimple *stmts = NULL;
ni_minus_gap_name = force_gimple_operand (ni_minus_gap_name, &stmts,
true, var);
gsi_insert_seq_on_edge_immediate (pe, stmts);
if (!is_gimple_val (ratio_name))
{
var = create_tmp_var (TREE_TYPE (ni_name), "bnd");
- gimple stmts = NULL;
+ gimple *stmts = NULL;
ratio_name = force_gimple_operand (ratio_name, &stmts, true, var);
gsi_insert_seq_on_edge_immediate (pe, stmts);
}
if (!is_gimple_val (ratio_mult_vf_name))
{
var = create_tmp_var (TREE_TYPE (ni_name), "ratio_mult_vf");
- gimple stmts = NULL;
+ gimple *stmts = NULL;
ratio_mult_vf_name = force_gimple_operand (ratio_mult_vf_name, &stmts,
true, var);
gsi_insert_seq_on_edge_immediate (pe, stmts);
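The three gimple *stmts = NULL; temporaries above are handed to force_gimple_operand, which fills in a statement sequence; the spelling stays type-correct because, after this patch, gimple_seq is itself a typedef for gimple * (a sequence is represented by its first statement). A stand-in sketch of that aliasing, not the real GCC definitions:

struct gimple { gimple *next; };
typedef gimple *gimple_seq;     /* a sequence is its first statement */

/* force_gimple_operand-style out-parameter: prepends STMT to *SEQ.  */
static void
emit_sketch (gimple_seq *seq, gimple *stmt)
{
  stmt->next = *seq;
  *seq = stmt;
}

int
main ()
{
  gimple *stmts = nullptr;      /* same type as 'gimple_seq stmts' */
  gimple g = { nullptr };
  emit_sketch (&stmts, &g);
  return stmts == &g ? 0 : 1;
}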
int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
bool grouped_store;
bool slp_scheduled = false;
- gimple stmt, pattern_stmt;
+ gimple *stmt, *pattern_stmt;
gimple_seq pattern_def_seq = NULL;
gimple_stmt_iterator pattern_def_si = gsi_none ();
bool transform_pattern_stmt = false;
gsi_next (&pattern_def_si);
if (pattern_def_seq != NULL)
{
- gimple pattern_def_stmt = NULL;
+ gimple *pattern_def_stmt = NULL;
stmt_vec_info pattern_def_stmt_info = NULL;
while (!gsi_end_p (pattern_def_si))
else
{
/* Free the attached stmt_vec_info and remove the stmt. */
- gimple store = gsi_stmt (si);
+ gimple *store = gsi_stmt (si);
free_stmt_vec_info (store);
unlink_stmt_vdef (store);
gsi_remove (&si, true);
#include "builtins.h"
/* Pattern recognition functions */
-static gimple vect_recog_widen_sum_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_widen_sum_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_widen_mult_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_widen_mult_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_dot_prod_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_dot_prod_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_sad_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_sad_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_pow_pattern (vec<gimple> *, tree *, tree *);
-static gimple vect_recog_over_widening_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_pow_pattern (vec<gimple *> *, tree *, tree *);
+static gimple *vect_recog_over_widening_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_widen_shift_pattern (vec<gimple> *,
+static gimple *vect_recog_widen_shift_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_rotate_pattern (vec<gimple> *, tree *, tree *);
-static gimple vect_recog_vector_vector_shift_pattern (vec<gimple> *,
+static gimple *vect_recog_rotate_pattern (vec<gimple *> *, tree *, tree *);
+static gimple *vect_recog_vector_vector_shift_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_divmod_pattern (vec<gimple> *,
+static gimple *vect_recog_divmod_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_mult_pattern (vec<gimple> *,
+static gimple *vect_recog_mult_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_mixed_size_cond_pattern (vec<gimple> *,
+static gimple *vect_recog_mixed_size_cond_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_bool_pattern (vec<gimple> *, tree *, tree *);
+static gimple *vect_recog_bool_pattern (vec<gimple *> *, tree *, tree *);
static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
vect_recog_widen_mult_pattern,
vect_recog_widen_sum_pattern,
vect_recog_bool_pattern};
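Every recognizer in this table now returns gimple * and takes vec<gimple *> *, so the vect_recog_func_ptr typedef behind the table must change in the same patch (the typedef itself is outside this hunk). A simplified, hypothetical sketch of keeping a dispatch table and its entries in sync:

struct gimple { int code; };
typedef int *tree;

/* Hypothetical, simplified mirror of the recognizer signature (the
   real one takes 'vec<gimple *> *'); the table's function-pointer
   typedef must be updated together with its entries.  */
typedef gimple *(*recog_fn) (gimple *, tree *, tree *);

static gimple *
recog_stub (gimple *last_stmt, tree *type_in, tree *type_out)
{
  (void) type_in; (void) type_out;
  return last_stmt;
}

static recog_fn recog_table[] = { recog_stub };

int
main ()
{
  gimple g = { 0 };
  return recog_table[0] (&g, nullptr, nullptr) == &g ? 0 : 1;
}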
static inline void
-append_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
+append_pattern_def_seq (stmt_vec_info stmt_info, gimple *stmt)
{
gimple_seq_add_stmt_without_update (&STMT_VINFO_PATTERN_DEF_SEQ (stmt_info),
stmt);
}
static inline void
-new_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
+new_pattern_def_seq (stmt_vec_info stmt_info, gimple *stmt)
{
STMT_VINFO_PATTERN_DEF_SEQ (stmt_info) = NULL;
append_pattern_def_seq (stmt_info, stmt);
to be defined as well. */
static bool
-vect_same_loop_or_bb_p (gimple stmt1, gimple stmt2)
+vect_same_loop_or_bb_p (gimple *stmt1, gimple *stmt2)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt1);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
/* If the LHS of DEF_STMT has a single use, and that statement is
in the same loop or basic block, return it. */
-static gimple
-vect_single_imm_use (gimple def_stmt)
+static gimple *
+vect_single_imm_use (gimple *def_stmt)
{
tree lhs = gimple_assign_lhs (def_stmt);
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (!single_imm_use (lhs, &use_p, &use_stmt))
return NULL;
unsigned. */
static bool
-type_conversion_p (tree name, gimple use_stmt, bool check_sign,
- tree *orig_type, gimple *def_stmt, bool *promotion)
+type_conversion_p (tree name, gimple *use_stmt, bool check_sign,
+ tree *orig_type, gimple **def_stmt, bool *promotion)
{
tree dummy;
- gimple dummy_gimple;
+ gimple *dummy_gimple;
loop_vec_info loop_vinfo;
stmt_vec_info stmt_vinfo;
tree type = TREE_TYPE (name);
is NULL, the caller must set SSA_NAME_DEF_STMT for the returned SSA var. */
static tree
-vect_recog_temp_ssa_var (tree type, gimple stmt)
+vect_recog_temp_ssa_var (tree type, gimple *stmt)
{
return make_temp_ssa_name (type, stmt, "patt");
}
the correct order (as is the case when this computation is in an
inner-loop nested in an outer-loop that is being vectorized). */
-static gimple
-vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_dot_prod_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple stmt, last_stmt = (*stmts)[0];
+ gimple *stmt, *last_stmt = (*stmts)[0];
tree oprnd0, oprnd1;
tree oprnd00, oprnd01;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree type, half_type;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
tree prod_type;
loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
struct loop *loop;
}
else
{
- gimple def_stmt;
+ gimple *def_stmt;
oprnd0 = gimple_assign_rhs1 (last_stmt);
oprnd1 = gimple_assign_rhs2 (last_stmt);
else
{
tree half_type0, half_type1;
- gimple def_stmt;
+ gimple *def_stmt;
tree oprnd0, oprnd1;
oprnd0 = gimple_assign_rhs1 (stmt);
SAD_EXPR <x_t, y_t, sum_0>
*/
-static gimple
-vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_sad_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = (*stmts)[0];
+ gimple *last_stmt = (*stmts)[0];
tree sad_oprnd0, sad_oprnd1;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree half_type;
{
/* Has been detected as widening-summation? */
- gimple stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
+ gimple *stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
sum_type = gimple_expr_type (stmt);
if (gimple_assign_rhs_code (stmt) != WIDEN_SUM_EXPR)
return NULL;
}
else
{
- gimple def_stmt;
+ gimple *def_stmt;
plus_oprnd0 = gimple_assign_rhs1 (last_stmt);
plus_oprnd1 = gimple_assign_rhs2 (last_stmt);
return NULL;
tree abs_type = half_type;
- gimple abs_stmt = SSA_NAME_DEF_STMT (plus_oprnd0);
+ gimple *abs_stmt = SSA_NAME_DEF_STMT (plus_oprnd0);
/* It cannot be the SAD pattern if the abs_stmt is outside the loop. */
if (!gimple_bb (abs_stmt) || !flow_bb_inside_loop_p (loop, gimple_bb (abs_stmt)))
if (TREE_CODE (abs_oprnd) != SSA_NAME)
return NULL;
- gimple diff_stmt = SSA_NAME_DEF_STMT (abs_oprnd);
+ gimple *diff_stmt = SSA_NAME_DEF_STMT (abs_oprnd);
/* It cannot be the SAD pattern if the diff_stmt is outside the loop. */
if (!gimple_bb (diff_stmt)
return NULL;
tree half_type0, half_type1;
- gimple def_stmt;
+ gimple *def_stmt;
tree minus_oprnd0 = gimple_assign_rhs1 (diff_stmt);
tree minus_oprnd1 = gimple_assign_rhs2 (diff_stmt);
/* Pattern detected. Create a stmt to be used to replace the pattern: */
tree var = vect_recog_temp_ssa_var (sum_type, NULL);
- gimple pattern_stmt = gimple_build_assign (var, SAD_EXPR, sad_oprnd0,
- sad_oprnd1, plus_oprnd1);
+ gimple *pattern_stmt = gimple_build_assign (var, SAD_EXPR, sad_oprnd0,
+ sad_oprnd1, plus_oprnd1);
if (dump_enabled_p ())
{
with a_it = (interm_type) a_t; Store such an operation in *WSTMT. */
static bool
-vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
+vect_handle_widen_op_by_const (gimple *stmt, enum tree_code code,
tree const_oprnd, tree *oprnd,
- gimple *wstmt, tree type,
- tree *half_type, gimple def_stmt)
+ gimple **wstmt, tree type,
+ tree *half_type, gimple *def_stmt)
{
tree new_type, new_oprnd;
returned stmt will be this type conversion stmt.
*/
-static gimple
-vect_recog_widen_mult_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_widen_mult_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
- gimple def_stmt0, def_stmt1;
+ gimple *last_stmt = stmts->pop ();
+ gimple *def_stmt0, *def_stmt1;
tree oprnd0, oprnd1;
tree type, half_type0, half_type1;
- gimple new_stmt = NULL, pattern_stmt = NULL;
+ gimple *new_stmt = NULL, *pattern_stmt = NULL;
tree vectype, vecitype;
tree var;
enum tree_code dummy_code;
return NULL;
tree* oprnd = NULL;
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
if (TYPE_PRECISION (half_type0) < TYPE_PRECISION (half_type1))
{
Use unsigned TYPE as the type for WIDEN_MULT_EXPR. */
if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (half_type0))
{
- gimple use_stmt;
+ gimple *use_stmt;
tree use_lhs;
tree use_type;
x = sqrt (x)
*/
-static gimple
-vect_recog_pow_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_pow_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = (*stmts)[0];
+ gimple *last_stmt = (*stmts)[0];
tree fn, base, exp = NULL;
- gimple stmt;
+ gimple *stmt;
tree var;
if (!is_gimple_call (last_stmt) || gimple_call_lhs (last_stmt) == NULL)
the correct order (as is the case when this computation is in an
inner-loop nested in an outer-loop that is being vectorized). */
-static gimple
-vect_recog_widen_sum_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_widen_sum_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple stmt, last_stmt = (*stmts)[0];
+ gimple *stmt, *last_stmt = (*stmts)[0];
tree oprnd0, oprnd1;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree type, half_type;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
struct loop *loop;
tree var;
the second pattern statement. */
static bool
-vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
- tree *op0, tree *op1, gimple *new_def_stmt,
- vec<gimple> *stmts)
+vect_operation_fits_smaller_type (gimple *stmt, tree def, tree *new_type,
+ tree *op0, tree *op1, gimple **new_def_stmt,
+ vec<gimple *> *stmts)
{
enum tree_code code;
tree const_oprnd, oprnd;
tree interm_type = NULL_TREE, half_type, new_oprnd, type;
- gimple def_stmt, new_stmt;
+ gimple *def_stmt, *new_stmt;
bool first = false;
bool promotion;
be 'type' or some intermediate type. For now, we expect S5 to be a type
demotion operation. We also check that S3 and S4 have only one use. */
-static gimple
-vect_recog_over_widening_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_over_widening_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple stmt = stmts->pop ();
- gimple pattern_stmt = NULL, new_def_stmt, prev_stmt = NULL, use_stmt = NULL;
+ gimple *stmt = stmts->pop ();
+ gimple *pattern_stmt = NULL, *new_def_stmt, *prev_stmt = NULL,
+ *use_stmt = NULL;
tree op0, op1, vectype = NULL_TREE, use_lhs, use_type;
tree var = NULL_TREE, new_type = NULL_TREE, new_oprnd;
bool first;
stmts that constitute the pattern. In this case it will be:
WIDEN_LSHIFT_EXPR <a_t, CONST>. */
-static gimple
-vect_recog_widen_shift_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_widen_shift_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
- gimple def_stmt0;
+ gimple *last_stmt = stmts->pop ();
+ gimple *def_stmt0;
tree oprnd0, oprnd1;
tree type, half_type0;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
tree vectype, vectype_out = NULL_TREE;
tree var;
enum tree_code dummy_code;
int dummy_int;
vec<tree> dummy_vec;
- gimple use_stmt;
+ gimple *use_stmt;
bool promotion;
if (!is_gimple_assign (last_stmt) || !vinfo_for_stmt (last_stmt))
}
/* Check if this is a widening operation. */
- gimple wstmt = NULL;
+ gimple *wstmt = NULL;
if (!vect_handle_widen_op_by_const (last_stmt, LSHIFT_EXPR, oprnd1,
&oprnd0, &wstmt,
type, &half_type0, def_stmt0))
* Return value: A new stmt that will be used to replace the rotate
S0 stmt. */
-static gimple
-vect_recog_rotate_pattern (vec<gimple> *stmts, tree *type_in, tree *type_out)
+static gimple *
+vect_recog_rotate_pattern (vec<gimple *> *stmts, tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, lhs, var, var1, var2, vectype, type, stype, def, def2;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
* Return value: A new stmt that will be used to replace the shift/rotate
S3 stmt. */
-static gimple
-vect_recog_vector_vector_shift_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_vector_vector_shift_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, lhs, var;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
* Return value: A new stmt that will be used to replace the multiplication
S1 or S2 stmt. */
-static gimple
-vect_recog_mult_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_mult_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, vectype, itype;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
optab optab;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
int power2_val, power2_neg_val;
* Return value: A new stmt that will be used to replace the division
S1 or modulo S4 stmt. */
-static gimple
-vect_recog_divmod_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_divmod_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, vectype, itype, cond;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
a_it = x_t CMP y_t ? b_it : c_it;
a_T = (TYPE) a_it; */
-static gimple
-vect_recog_mixed_size_cond_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_mixed_size_cond_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = (*stmts)[0];
+ gimple *last_stmt = (*stmts)[0];
tree cond_expr, then_clause, else_clause;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
tree orig_type0 = NULL_TREE, orig_type1 = NULL_TREE;
- gimple def_stmt0 = NULL, def_stmt1 = NULL;
+ gimple *def_stmt0 = NULL, *def_stmt1 = NULL;
bool promotion;
tree comp_scalar_type;
static bool
check_bool_pattern (tree var, loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
tree def, rhs1;
enum tree_code rhs_code;
adjust_bool_pattern_cast (tree type, tree var)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (SSA_NAME_DEF_STMT (var));
- gimple cast_stmt, pattern_stmt;
+ gimple *cast_stmt, *pattern_stmt;
gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo));
pattern_stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
static tree
adjust_bool_pattern (tree var, tree out_type, tree trueval,
- vec<gimple> *stmts)
+ vec<gimple *> *stmts)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
enum tree_code rhs_code, def_rhs_code;
tree itype, cond_expr, rhs1, rhs2, irhs1, irhs2;
location_t loc;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
rhs1 = gimple_assign_rhs1 (stmt);
rhs2 = gimple_assign_rhs2 (stmt);
if (TYPE_PRECISION (TREE_TYPE (irhs1))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
{
- gimple tstmt;
+ gimple *tstmt;
stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
irhs2 = adjust_bool_pattern (rhs2, out_type, irhs1, stmts);
tstmt = stmts->pop ();
if (TYPE_PRECISION (TREE_TYPE (irhs2))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
{
- gimple tstmt;
+ gimple *tstmt;
stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
irhs1 = adjust_bool_pattern (rhs1, out_type, irhs2, stmts);
tstmt = stmts->pop ();
S3' c_T = a_T | b_T;
but the above is more efficient. */
-static gimple
-vect_recog_bool_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_bool_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
enum tree_code rhs_code;
tree var, lhs, rhs, vectype;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
- gimple pattern_stmt;
+ gimple *pattern_stmt;
if (!is_gimple_assign (last_stmt))
return NULL;
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
{
tree rhs2 = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
- gimple cast_stmt = gimple_build_assign (rhs2, NOP_EXPR, rhs);
+ gimple *cast_stmt = gimple_build_assign (rhs2, NOP_EXPR, rhs);
new_pattern_def_seq (stmt_vinfo, cast_stmt);
rhs = rhs2;
}
/* Mark statements that are involved in a pattern. */
static inline void
-vect_mark_pattern_stmts (gimple orig_stmt, gimple pattern_stmt,
+vect_mark_pattern_stmts (gimple *orig_stmt, gimple *pattern_stmt,
tree pattern_vectype)
{
stmt_vec_info pattern_stmt_info, def_stmt_info;
stmt_vec_info orig_stmt_info = vinfo_for_stmt (orig_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (orig_stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (orig_stmt_info);
- gimple def_stmt;
+ gimple *def_stmt;
pattern_stmt_info = vinfo_for_stmt (pattern_stmt);
if (pattern_stmt_info == NULL)
static void
vect_pattern_recog_1 (vect_recog_func_ptr vect_recog_func,
gimple_stmt_iterator si,
- vec<gimple> *stmts_to_replace)
+ vec<gimple *> *stmts_to_replace)
{
- gimple stmt = gsi_stmt (si), pattern_stmt;
+ gimple *stmt = gsi_stmt (si), *pattern_stmt;
stmt_vec_info stmt_info;
loop_vec_info loop_vinfo;
tree pattern_vectype;
tree type_in, type_out;
enum tree_code code;
int i;
- gimple next;
+ gimple *next;
stmts_to_replace->truncate (0);
stmts_to_replace->quick_push (stmt);
gimple_stmt_iterator si;
unsigned int i, j;
vect_recog_func_ptr vect_recog_func;
- auto_vec<gimple, 1> stmts_to_replace;
- gimple stmt;
+ auto_vec<gimple *, 1> stmts_to_replace;
+ gimple *stmt;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
source_location
find_bb_location (basic_block bb)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
gimple_stmt_iterator si;
if (!bb)
/* Create an SLP node for SCALAR_STMTS. */
static slp_tree
-vect_create_new_slp_node (vec<gimple> scalar_stmts)
+vect_create_new_slp_node (vec<gimple *> scalar_stmts)
{
slp_tree node;
- gimple stmt = scalar_stmts[0];
+ gimple *stmt = scalar_stmts[0];
unsigned int nops;
if (is_gimple_call (stmt))
from FIRST_STMT. Return -1 if the data-ref is not a part of the chain. */
static int
-vect_get_place_in_interleaving_chain (gimple stmt, gimple first_stmt)
+vect_get_place_in_interleaving_chain (gimple *stmt, gimple *first_stmt)
{
- gimple next_stmt = first_stmt;
+ gimple *next_stmt = first_stmt;
int result = 0;
if (first_stmt != GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
static int
vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- gimple stmt, unsigned stmt_num,
+ gimple *stmt, unsigned stmt_num,
vec<slp_oprnd_info> *oprnds_info)
{
tree oprnd;
unsigned int i, number_of_oprnds;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt = vect_uninitialized_def;
struct loop *loop = NULL;
bool pattern = false;
static bool
vect_build_slp_tree_1 (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- vec<gimple> stmts, unsigned int group_size,
+ vec<gimple *> stmts, unsigned int group_size,
unsigned nops, unsigned int *max_nunits,
unsigned int vectorization_factor, bool *matches,
bool *two_operators)
{
unsigned int i;
- gimple first_stmt = stmts[0], stmt = stmts[0];
+ gimple *first_stmt = stmts[0], *stmt = stmts[0];
enum tree_code first_stmt_code = ERROR_MARK;
enum tree_code alt_stmt_code = ERROR_MARK;
enum tree_code rhs_code = ERROR_MARK;
machine_mode optab_op2_mode;
machine_mode vec_mode;
HOST_WIDE_INT dummy;
- gimple first_load = NULL, prev_first_load = NULL;
+ gimple *first_load = NULL, *prev_first_load = NULL;
tree cond;
/* For every stmt in NODE find its def stmt/s. */
if (rhs_code == CALL_EXPR)
{
- gimple first_stmt = stmts[0];
+ gimple *first_stmt = stmts[0];
if (gimple_call_num_args (stmt) != nops
|| !operand_equal_p (gimple_call_fn (first_stmt),
gimple_call_fn (stmt), 0)
unsigned max_tree_size)
{
unsigned nops, i, this_tree_size = 0;
- gimple stmt;
+ gimple *stmt;
matches[0] = false;
for (j = 0; j < group_size; ++j)
if (!matches[j])
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (*node)[j];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (*node)[j];
swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
gimple_assign_rhs2_ptr (stmt));
}
vect_print_slp_tree (int dump_kind, slp_tree node)
{
int i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
if (!node)
vect_mark_slp_stmts (slp_tree node, enum slp_vect_type mark, int j)
{
int i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
if (!node)
vect_mark_slp_stmts_relevant (slp_tree node)
{
int i;
- gimple stmt;
+ gimple *stmt;
stmt_vec_info stmt_info;
slp_tree child;
vect_slp_rearrange_stmts (slp_tree node, unsigned int group_size,
vec<unsigned> permutation)
{
- gimple stmt;
- vec<gimple> tmp_stmts;
+ gimple *stmt;
+ vec<gimple *> tmp_stmts;
unsigned int i;
slp_tree child;
unsigned int group_size = SLP_INSTANCE_GROUP_SIZE (slp_instn);
unsigned int i, j, k, next;
slp_tree node;
- gimple stmt, load, next_load, first_load;
+ gimple *stmt, *load, *next_load, *first_load;
struct data_reference *dr;
if (dump_enabled_p ())
/* Find the last scalar stmt in NODE. */
-static gimple
+static gimple *
vect_find_last_scalar_stmt_in_slp (slp_tree node)
{
- gimple last = NULL, stmt;
+ gimple *last = NULL, *stmt;
for (int i = 0; SLP_TREE_SCALAR_STMTS (node).iterate (i, &stmt); i++)
{
{
unsigned i;
slp_tree child;
- gimple stmt, s;
+ gimple *stmt, *s;
stmt_vec_info stmt_info;
tree lhs;
unsigned group_size = SLP_INSTANCE_GROUP_SIZE (instance);
for (i = 0; i < gimple_num_ops (stmt); ++i)
{
tree def, op = gimple_op (stmt, i);
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
if (!op || op == lhs)
continue;
operation is widening like DOT_PROD or SAD. */
if (!STMT_VINFO_GROUPED_ACCESS (stmt_info))
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (node)[0];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[0];
switch (gimple_assign_rhs_code (stmt))
{
case DOT_PROD_EXPR:
static bool
vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- gimple stmt, unsigned max_tree_size)
+ gimple *stmt, unsigned max_tree_size)
{
slp_instance new_instance;
slp_tree node;
unsigned int group_size = GROUP_SIZE (vinfo_for_stmt (stmt));
unsigned int unrolling_factor = 1, nunits;
tree vectype, scalar_type = NULL_TREE;
- gimple next;
+ gimple *next;
unsigned int vectorization_factor = 0;
int i;
unsigned int max_nunits = 0;
vec<slp_tree> loads;
struct data_reference *dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt));
- vec<gimple> scalar_stmts;
+ vec<gimple *> scalar_stmts;
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
else
{
/* Collect reduction statements. */
- vec<gimple> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
+ vec<gimple *> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
for (i = 0; reductions.iterate (i, &next); i++)
scalar_stmts.safe_push (next);
}
{
vec<unsigned> load_permutation;
int j;
- gimple load, first_stmt;
+ gimple *load, *first_stmt;
bool this_load_permuted = false;
load_permutation.create (group_size);
first_stmt = GROUP_FIRST_ELEMENT
unsigned max_tree_size)
{
unsigned int i;
- vec<gimple> grouped_stores;
- vec<gimple> reductions = vNULL;
- vec<gimple> reduc_chains = vNULL;
- gimple first_element;
+ vec<gimple *> grouped_stores;
+ vec<gimple *> reductions = vNULL;
+ vec<gimple *> reduc_chains = vNULL;
+ gimple *first_element;
bool ok = false;
if (dump_enabled_p ())
static void
vect_detect_hybrid_slp_stmts (slp_tree node, unsigned i, slp_vect_type stype)
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (node)[i];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[i];
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
stmt_vec_info use_vinfo, stmt_vinfo = vinfo_for_stmt (stmt);
slp_tree child;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
if (TREE_CODE (*tp) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (*tp))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (*tp);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (*tp);
if (flow_bb_inside_loop_p (loopp, gimple_bb (def_stmt))
&& PURE_SLP_STMT (vinfo_for_stmt (def_stmt)))
{
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (STMT_VINFO_IN_PATTERN_P (stmt_info))
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, 0);
set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, NULL, res));
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (stmt_info)
{
bool dummy;
int i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
if (!node)
{
unsigned scalar_cost = 0;
unsigned i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter, SSA_OP_DEF)
{
imm_use_iterator use_iter;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, DEF_FROM_PTR (def_p))
if (!is_gimple_debug (use_stmt)
&& (gimple_code (use_stmt) == GIMPLE_PHI
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt)
&& !gimple_nop_p (stmt)
&& gimple_code (stmt) != GIMPLE_LABEL)
unsigned int op_num, unsigned int number_of_vectors,
int reduc_index)
{
- vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
- gimple stmt = stmts[0];
+ vec<gimple *> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+ gimple *stmt = stmts[0];
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned nunits;
tree vec_cst;
bool constant_p, is_store;
tree neutral_op = NULL;
enum tree_code code = gimple_expr_code (stmt);
- gimple def_stmt;
+ gimple *def_stmt;
struct loop *loop;
gimple_seq ctor_seq = NULL;
else
{
tree new_temp = make_ssa_name (TREE_TYPE (vector_type));
- gimple init_stmt;
+ gimple *init_stmt;
op = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (vector_type), op);
init_stmt
= gimple_build_assign (new_temp, VIEW_CONVERT_EXPR, op);
vect_get_slp_vect_defs (slp_tree slp_node, vec<tree> *vec_oprnds)
{
tree vec_oprnd;
- gimple vec_def_stmt;
+ gimple *vec_def_stmt;
unsigned int i;
gcc_assert (SLP_TREE_VEC_STMTS (slp_node).exists ());
vect_get_slp_defs (vec<tree> ops, slp_tree slp_node,
vec<vec<tree> > *vec_oprnds, int reduc_index)
{
- gimple first_stmt;
+ gimple *first_stmt;
int number_of_vects = 0, i;
unsigned int child_index = 0;
HOST_WIDE_INT lhs_size_unit, rhs_size_unit;
/* We have to check both pattern and original def, if available. */
if (child)
{
- gimple first_def = SLP_TREE_SCALAR_STMTS (child)[0];
- gimple related
+ gimple *first_def = SLP_TREE_SCALAR_STMTS (child)[0];
+ gimple *related
= STMT_VINFO_RELATED_STMT (vinfo_for_stmt (first_def));
if (operand_equal_p (oprnd, gimple_get_lhs (first_def), 0)
the created stmts must be inserted. */
static inline void
-vect_create_mask_and_perm (gimple stmt,
+vect_create_mask_and_perm (gimple *stmt,
tree mask, int first_vec_indx, int second_vec_indx,
gimple_stmt_iterator *gsi, slp_tree node,
tree vectype, vec<tree> dr_chain,
int ncopies, int vect_stmts_counter)
{
tree perm_dest;
- gimple perm_stmt = NULL;
+ gimple *perm_stmt = NULL;
int i, stride;
tree first_vec, second_vec, data_ref;
the next vector, i.e., the current first vector is not needed. */
static bool
-vect_get_mask_element (gimple stmt, int first_mask_element, int m,
+vect_get_mask_element (gimple *stmt, int first_mask_element, int m,
int mask_nunits, bool only_one_vec, int index,
unsigned char *mask, int *current_mask_element,
bool *need_next_vector, int *number_of_mask_fixes,
gimple_stmt_iterator *gsi, int vf,
slp_instance slp_node_instance, bool analyze_only)
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (node)[0];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[0];
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree mask_element_type = NULL_TREE, mask_type;
int i, j, k, nunits, vec_index = 0;
vect_schedule_slp_instance (slp_tree node, slp_instance instance,
unsigned int vectorization_factor)
{
- gimple stmt;
+ gimple *stmt;
bool grouped_store, is_store;
gimple_stmt_iterator si;
stmt_vec_info stmt_info;
{
enum tree_code code0 = gimple_assign_rhs_code (stmt);
enum tree_code ocode;
- gimple ostmt;
+ gimple *ostmt;
unsigned char *mask = XALLOCAVEC (unsigned char, group_size);
bool allsame = true;
FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, ostmt)
mask[i] = 0;
if (!allsame)
{
- vec<gimple> v0;
- vec<gimple> v1;
+ vec<gimple *> v0;
+ vec<gimple *> v1;
unsigned j;
tree tmask = NULL_TREE;
vect_transform_stmt (stmt, &si, &grouped_store, node, instance);
Unfortunately that isn't too great and at least for
plus/minus we'd eventually like to match targets'
vector addsub instructions. */
- gimple vstmt;
+ gimple *vstmt;
vstmt = gimple_build_assign (make_ssa_name (vectype),
VEC_PERM_EXPR,
gimple_assign_lhs (v0[j]),
static void
vect_remove_slp_scalar_calls (slp_tree node)
{
- gimple stmt, new_stmt;
+ gimple *stmt, *new_stmt;
gimple_stmt_iterator gsi;
int i;
slp_tree child;
FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
slp_tree root = SLP_INSTANCE_TREE (instance);
- gimple store;
+ gimple *store;
unsigned int j;
gimple_stmt_iterator gsi;
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info;
if (dump_enabled_p ())
bool
stmt_in_inner_loop_p (struct _stmt_vec_info *stmt_info)
{
- gimple stmt = STMT_VINFO_STMT (stmt_info);
+ gimple *stmt = STMT_VINFO_STMT (stmt_info);
basic_block bb = gimple_bb (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop* loop;
with scalar destination SCALAR_DEST. */
static tree
-read_vector_array (gimple stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
+read_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
tree array, unsigned HOST_WIDE_INT n)
{
tree vect_type, vect, vect_name, array_ref;
- gimple new_stmt;
+ gimple *new_stmt;
gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
vect_type = TREE_TYPE (TREE_TYPE (array));
The store is part of the vectorization of STMT. */
static void
-write_vector_array (gimple stmt, gimple_stmt_iterator *gsi, tree vect,
+write_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree vect,
tree array, unsigned HOST_WIDE_INT n)
{
tree array_ref;
- gimple new_stmt;
+ gimple *new_stmt;
array_ref = build4 (ARRAY_REF, TREE_TYPE (vect), array,
build_int_cst (size_type_node, n),
Mark STMT as "relevant for vectorization" and add it to WORKLIST. */
static void
-vect_mark_relevant (vec<gimple> *worklist, gimple stmt,
+vect_mark_relevant (vec<gimple *> *worklist, gimple *stmt,
enum vect_relevant relevant, bool live_p,
bool used_in_pattern)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
enum vect_relevant save_relevant = STMT_VINFO_RELEVANT (stmt_info);
bool save_live_p = STMT_VINFO_LIVE_P (stmt_info);
- gimple pattern_stmt;
+ gimple *pattern_stmt;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
tree lhs;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
CHECKME: what other side effects would the vectorizer allow? */
static bool
-vect_stmt_relevant_p (gimple stmt, loop_vec_info loop_vinfo,
+vect_stmt_relevant_p (gimple *stmt, loop_vec_info loop_vinfo,
enum vect_relevant *relevant, bool *live_p)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
used in STMT for anything other than indexing an array. */
static bool
-exist_non_indexing_operands_for_use_p (tree use, gimple stmt)
+exist_non_indexing_operands_for_use_p (tree use, gimple *stmt)
{
tree operand;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
Return true if everything is as expected. Return false otherwise. */
static bool
-process_use (gimple stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
- enum vect_relevant relevant, vec<gimple> *worklist,
+process_use (gimple *stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
+ enum vect_relevant relevant, vec<gimple *> *worklist,
bool force)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
stmt_vec_info dstmt_vinfo;
basic_block bb, def_bb;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
/* case 1: we are only interested in uses that need to be vectorized. Uses
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
unsigned int nbbs = loop->num_nodes;
gimple_stmt_iterator si;
- gimple stmt;
+ gimple *stmt;
unsigned int i;
stmt_vec_info stmt_vinfo;
basic_block bb;
- gimple phi;
+ gimple *phi;
bool live_p;
enum vect_relevant relevant, tmp_relevant;
enum vect_def_type def_type;
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_mark_stmts_to_be_vectorized ===\n");
- auto_vec<gimple, 64> worklist;
+ auto_vec<gimple *, 64> worklist;
/* 1. Init worklist. */
for (i = 0; i < nbbs; i++)
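The container hunks above (vec<gimple *>, auto_vec<gimple *, 64>) only respell the element type; the elements were pointers all along, so size and semantics are unchanged. A hedged sketch with std::vector standing in for GCC's own vec template:

  #include <vector>

  struct stmt { int uid; };   /* stand-in for the statement class */

  int main ()
  {
    /* The element type is now explicitly a pointer; previously the
       same pointer hid behind the bare typedef name.  */
    std::vector<stmt *> worklist;
    stmt s = { 7 };
    worklist.push_back (&s);
    return worklist.back ()->uid == 7 ? 0 : 1;
  }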
static int
vect_cost_group_size (stmt_vec_info stmt_info)
{
- gimple first_stmt = GROUP_FIRST_ELEMENT (stmt_info);
+ gimple *first_stmt = GROUP_FIRST_ELEMENT (stmt_info);
if (first_stmt == STMT_VINFO_STMT (stmt_info))
return GROUP_SIZE (stmt_info);
int group_size;
unsigned int inside_cost = 0, prologue_cost = 0;
struct data_reference *first_dr;
- gimple first_stmt;
+ gimple *first_stmt;
if (dt == vect_constant_def || dt == vect_external_def)
prologue_cost += record_stmt_cost (prologue_cost_vec, 1, scalar_to_vec,
stmt_vector_for_cost *body_cost_vec)
{
int alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
switch (alignment_support_scheme)
stmt_vector_for_cost *body_cost_vec)
{
int group_size;
- gimple first_stmt;
+ gimple *first_stmt;
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info), *first_dr;
unsigned int inside_cost = 0, prologue_cost = 0;
bool record_prologue_costs)
{
int alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
switch (alignment_support_scheme)
the loop preheader for the vectorized stmt STMT. */
static void
-vect_init_vector_1 (gimple stmt, gimple new_stmt, gimple_stmt_iterator *gsi)
+vect_init_vector_1 (gimple *stmt, gimple *new_stmt, gimple_stmt_iterator *gsi)
{
if (gsi)
vect_finish_stmt_generation (stmt, new_stmt, gsi);
It will be used in the vectorization of STMT. */
tree
-vect_init_vector (gimple stmt, tree val, tree type, gimple_stmt_iterator *gsi)
+vect_init_vector (gimple *stmt, tree val, tree type, gimple_stmt_iterator *gsi)
{
tree new_var;
- gimple init_stmt;
+ gimple *init_stmt;
tree vec_oprnd;
tree new_temp;
needs to be introduced. */
tree
-vect_get_vec_def_for_operand (tree op, gimple stmt, tree *scalar_def)
+vect_get_vec_def_for_operand (tree op, gimple *stmt, tree *scalar_def)
{
tree vec_oprnd;
- gimple vec_stmt;
- gimple def_stmt;
+ gimple *vec_stmt;
+ gimple *def_stmt;
stmt_vec_info def_stmt_info = NULL;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned int nunits;
tree
vect_get_vec_def_for_stmt_copy (enum vect_def_type dt, tree vec_oprnd)
{
- gimple vec_stmt_for_operand;
+ gimple *vec_stmt_for_operand;
stmt_vec_info def_stmt_info;
/* Do nothing; can reuse same def. */
and -1 otherwise. */
void
-vect_get_vec_defs (tree op0, tree op1, gimple stmt,
+vect_get_vec_defs (tree op0, tree op1, gimple *stmt,
vec<tree> *vec_oprnds0,
vec<tree> *vec_oprnds1,
slp_tree slp_node, int reduc_index)
Insert a new stmt. */
void
-vect_finish_stmt_generation (gimple stmt, gimple vec_stmt,
+vect_finish_stmt_generation (gimple *stmt, gimple *vec_stmt,
gimple_stmt_iterator *gsi)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!gsi_end_p (*gsi)
&& gimple_has_mem_ops (vec_stmt))
{
- gimple at_stmt = gsi_stmt (*gsi);
+ gimple *at_stmt = gsi_stmt (*gsi);
tree vuse = gimple_vuse (at_stmt);
if (vuse && TREE_CODE (vuse) == SSA_NAME)
{
}
-static tree permute_vec_elements (tree, tree, tree, gimple,
+static tree permute_vec_elements (tree, tree, tree, gimple *,
gimple_stmt_iterator *);
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_mask_load_store (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest = NULL;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
tree elem_type;
- gimple new_stmt;
+ gimple *new_stmt;
tree dummy;
tree dataref_ptr = NULL_TREE;
- gimple ptr_incr;
+ gimple *ptr_incr;
int nunits = TYPE_VECTOR_SUBPARTS (vectype);
int ncopies;
int i, j;
enum vect_def_type gather_dt = vect_unknown_def_type;
bool is_store;
tree mask;
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
enum vect_def_type dt;
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
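The vectorizable_* signatures below show the one consequence that is easy to misread: a function that hands back a statement through an out-parameter goes from `gimple *vec_stmt' to `gimple **vec_stmt', while call sites keep passing `&vec_stmt' unchanged. A sketch with the same stand-in type:

  struct stmt { int uid; };

  /* The statement handle is now `stmt *', so the parameter that
     receives it is `stmt **'.  */
  static bool produce (stmt **vec_stmt)
  {
    static stmt s = { 42 };
    *vec_stmt = &s;
    return true;
  }

  int main ()
  {
    stmt *vec_stmt = 0;   /* under the old typedef, spelled without the `*' */
    return produce (&vec_stmt) && vec_stmt->uid == 42 ? 0 : 1;
  }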
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_call (gimple gs, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
slp_tree slp_node)
{
gcall *stmt;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
tree fndecl, new_temp, def, rhs_type;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[3]
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
int ncopies, j;
vec<tree> vargs = vNULL;
enum { NARROW, NONE, WIDEN } modifier;
tree cst = build_vector (vectype_out, v);
tree new_var
= vect_get_new_vect_var (vectype_out, vect_simple_var, "cst_");
- gimple init_stmt = gimple_build_assign (new_var, cst);
+ gimple *init_stmt = gimple_build_assign (new_var, cst);
new_temp = make_ssa_name (new_var, init_stmt);
gimple_assign_set_lhs (init_stmt, new_temp);
vect_init_vector_1 (stmt, init_stmt, NULL);
vectorized loop. */
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
basic_block use_bb = gimple_bb (use_stmt);
vect_simd_lane_linear (tree op, struct loop *loop,
struct simd_call_arg_info *arginfo)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!is_gimple_assign (def_stmt)
|| gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
tree fndecl, new_temp, def;
- gimple def_stmt;
- gimple new_stmt = NULL;
+ gimple *def_stmt;
+ gimple *new_stmt = NULL;
int ncopies, j;
vec<simd_call_arg_info> arginfo = vNULL;
vec<tree> vargs = vNULL;
needs to be created (DECL is a function-decl of a target-builtin).
STMT is the original scalar stmt that we are vectorizing. */
-static gimple
+static gimple *
vect_gen_widened_results_half (enum tree_code code,
tree decl,
tree vec_oprnd0, tree vec_oprnd1, int op_type,
tree vec_dest, gimple_stmt_iterator *gsi,
- gimple stmt)
+ gimple *stmt)
{
- gimple new_stmt;
+ gimple *new_stmt;
tree new_temp;
/* Generate half of the widened result: */
The vectors are collected into VEC_OPRNDS. */
static void
-vect_get_loop_based_defs (tree *oprnd, gimple stmt, enum vect_def_type dt,
+vect_get_loop_based_defs (tree *oprnd, gimple *stmt, enum vect_def_type dt,
vec<tree> *vec_oprnds, int multi_step_cvt)
{
tree vec_oprnd;
static void
vect_create_vectorized_demotion_stmts (vec<tree> *vec_oprnds,
- int multi_step_cvt, gimple stmt,
+ int multi_step_cvt, gimple *stmt,
vec<tree> vec_dsts,
gimple_stmt_iterator *gsi,
slp_tree slp_node, enum tree_code code,
{
unsigned int i;
tree vop0, vop1, new_tmp, vec_dest;
- gimple new_stmt;
+ gimple *new_stmt;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
vec_dest = vec_dsts.pop ();
static void
vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
vec<tree> *vec_oprnds1,
- gimple stmt, tree vec_dest,
+ gimple *stmt, tree vec_dest,
gimple_stmt_iterator *gsi,
enum tree_code code1,
enum tree_code code2, tree decl1,
{
int i;
tree vop0, vop1, new_tmp1, new_tmp2;
- gimple new_stmt1, new_stmt2;
+ gimple *new_stmt1, *new_stmt2;
vec<tree> vec_tmp = vNULL;
vec_tmp.create (vec_oprnds0->length () * 2);
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
tree decl1 = NULL_TREE, decl2 = NULL_TREE;
tree new_temp;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info;
int nunits_in;
int nunits_out;
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_assignment (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
tree new_temp;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
int ncopies;
int i, j;
vec<tree> vec_oprnds = vNULL;
tree vop;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info = NULL;
enum tree_code code;
tree vectype_in;
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_shift (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
int icode;
machine_mode optab_op2_mode;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info;
int nunits_in;
int nunits_out;
a scalar shift. */
if (slp_node)
{
- vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
- gimple slpstmt;
+ vec<gimple *> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+ gimple *slpstmt;
FOR_EACH_VEC_ELT (stmts, k, slpstmt)
if (!operand_equal_p (gimple_assign_rhs2 (slpstmt), op1, 0))
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
optab optab;
bool target_support_p;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[3]
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info;
int nunits_in;
int nunits_out;
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
slp_tree slp_node)
{
tree scalar_dest;
tree dummy;
enum dr_alignment_support alignment_support_scheme;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
stmt_vec_info prev_stmt_info = NULL;
tree dataref_ptr = NULL_TREE;
tree dataref_offset = NULL_TREE;
- gimple ptr_incr = NULL;
+ gimple *ptr_incr = NULL;
int ncopies;
int j;
- gimple next_stmt, first_stmt = NULL;
+ gimple *next_stmt, *first_stmt = NULL;
bool grouped_store = false;
bool store_lanes_p = false;
unsigned int group_size, i;
int scatter_scale = 1;
enum vect_def_type scatter_idx_dt = vect_unknown_def_type;
enum vect_def_type scatter_src_dt = vect_unknown_def_type;
- gimple new_stmt;
+ gimple *new_stmt;
if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
return false;
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
scatter_decl = vect_check_gather_scatter (stmt, loop_vinfo, &scatter_base,
&scatter_off, &scatter_scale);
{
gimple_stmt_iterator incr_gsi;
bool insert_after;
- gimple incr;
+ gimple *incr;
tree offvar;
tree ivstep;
tree running_off;
for (i = 0; i < nstores; i++)
{
tree newref, newoff;
- gimple incr, assign;
+ gimple *incr, *assign;
tree size = TYPE_SIZE (ltype);
/* Extract the i'th component. */
tree pos = fold_build2 (MULT_EXPR, bitsizetype,
tree new_temp = make_ssa_name (perm_dest);
/* Generate the permute statement. */
- gimple perm_stmt
+ gimple *perm_stmt
= gimple_build_assign (new_temp, VEC_PERM_EXPR, vec_oprnd,
vec_oprnd, perm_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
permuted vector variable. */
static tree
-permute_vec_elements (tree x, tree y, tree mask_vec, gimple stmt,
+permute_vec_elements (tree x, tree y, tree mask_vec, gimple *stmt,
gimple_stmt_iterator *gsi)
{
tree vectype = TREE_TYPE (x);
tree perm_dest, data_ref;
- gimple perm_stmt;
+ gimple *perm_stmt;
perm_dest = vect_create_destination_var (gimple_get_lhs (stmt), vectype);
data_ref = make_ssa_name (perm_dest);
otherwise returns false. */
static bool
-hoist_defs_of_uses (gimple stmt, struct loop *loop)
+hoist_defs_of_uses (gimple *stmt, struct loop *loop)
{
ssa_op_iter i;
tree op;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!gimple_nop_p (def_stmt)
&& flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
{
return false;
FOR_EACH_SSA_TREE_OPERAND (op2, def_stmt, i2, SSA_OP_USE)
{
- gimple def_stmt2 = SSA_NAME_DEF_STMT (op2);
+ gimple *def_stmt2 = SSA_NAME_DEF_STMT (op2);
if (!gimple_nop_p (def_stmt2)
&& flow_bb_inside_loop_p (loop, gimple_bb (def_stmt2)))
return false;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!gimple_nop_p (def_stmt)
&& flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
{
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
slp_tree slp_node, slp_instance slp_node_instance)
{
tree scalar_dest;
tree elem_type;
tree new_temp;
machine_mode mode;
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
tree dummy;
enum dr_alignment_support alignment_support_scheme;
tree dataref_ptr = NULL_TREE;
tree dataref_offset = NULL_TREE;
- gimple ptr_incr = NULL;
+ gimple *ptr_incr = NULL;
int ncopies;
int i, j, group_size = -1, group_gap_adj;
tree msq = NULL_TREE, lsq;
vec<tree> dr_chain = vNULL;
bool grouped_load = false;
bool load_lanes_p = false;
- gimple first_stmt;
+ gimple *first_stmt;
bool inv_p;
bool negative = false;
bool compute_in_loop = false;
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
{
gimple_stmt_iterator incr_gsi;
bool insert_after;
- gimple incr;
+ gimple *incr;
tree offvar;
tree ivstep;
tree running_off;
for (i = 0; i < nloads; i++)
{
tree newref, newoff;
- gimple incr;
+ gimple *incr;
newref = build2 (MEM_REF, ltype, running_off, alias_off);
newref = force_gimple_operand_gsi (gsi, newref, true,
vect_finish_stmt_generation (stmt, new_stmt, gsi);
tree newoff = copy_ssa_name (running_off);
- gimple incr = gimple_build_assign (newoff, POINTER_PLUS_EXPR,
+ gimple *incr = gimple_build_assign (newoff, POINTER_PLUS_EXPR,
running_off, stride_step);
vect_finish_stmt_generation (stmt, incr, gsi);
condition operands are supportable using vect_is_simple_use. */
static bool
-vect_is_simple_cond (tree cond, gimple stmt, loop_vec_info loop_vinfo,
+vect_is_simple_cond (tree cond, gimple *stmt, loop_vec_info loop_vinfo,
bb_vec_info bb_vinfo, tree *comp_vectype)
{
tree lhs, rhs;
if (TREE_CODE (lhs) == SSA_NAME)
{
- gimple lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
if (!vect_is_simple_use_1 (lhs, stmt, loop_vinfo, bb_vinfo,
&lhs_def_stmt, &def, &dt, &vectype1))
return false;
if (TREE_CODE (rhs) == SSA_NAME)
{
- gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
if (!vect_is_simple_use_1 (rhs, stmt, loop_vinfo, bb_vinfo,
&rhs_def_stmt, &def, &dt, &vectype2))
return false;
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
bool
-vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, tree reduc_def, int reduc_index,
+vectorizable_condition (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, tree reduc_def, int reduc_index,
slp_tree slp_node)
{
tree scalar_dest = NULL_TREE;
if (TREE_CODE (then_clause) == SSA_NAME)
{
- gimple then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
+ gimple *then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
if (!vect_is_simple_use (then_clause, stmt, loop_vinfo, bb_vinfo,
&then_def_stmt, &def, &dt))
return false;
if (TREE_CODE (else_clause) == SSA_NAME)
{
- gimple else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
+ gimple *else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
if (!vect_is_simple_use (else_clause, stmt, loop_vinfo, bb_vinfo,
&else_def_stmt, &def, &dt))
return false;
}
else
{
- gimple gtemp;
+ gimple *gtemp;
vec_cond_lhs =
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0),
stmt, NULL);
/* Make sure the statement is vectorizable. */
bool
-vect_analyze_stmt (gimple stmt, bool *need_to_vectorize, slp_tree node)
+vect_analyze_stmt (gimple *stmt, bool *need_to_vectorize, slp_tree node)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
enum vect_relevant relevance = STMT_VINFO_RELEVANT (stmt_info);
bool ok;
tree scalar_type, vectype;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
gimple_seq pattern_def_seq;
if (dump_enabled_p ())
for (si = gsi_start (pattern_def_seq); !gsi_end_p (si); gsi_next (&si))
{
- gimple pattern_def_stmt = gsi_stmt (si);
+ gimple *pattern_def_stmt = gsi_stmt (si);
if (STMT_VINFO_RELEVANT_P (vinfo_for_stmt (pattern_def_stmt))
|| STMT_VINFO_LIVE_P (vinfo_for_stmt (pattern_def_stmt)))
{
Create a vectorized stmt to replace STMT, and insert it at GSI. */
bool
-vect_transform_stmt (gimple stmt, gimple_stmt_iterator *gsi,
+vect_transform_stmt (gimple *stmt, gimple_stmt_iterator *gsi,
bool *grouped_store, slp_tree slp_node,
slp_instance slp_node_instance)
{
bool is_store = false;
- gimple vec_stmt = NULL;
+ gimple *vec_stmt = NULL;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
bool done;
- gimple old_vec_stmt = STMT_VINFO_VEC_STMT (stmt_info);
+ gimple *old_vec_stmt = STMT_VINFO_VEC_STMT (stmt_info);
switch (STMT_VINFO_TYPE (stmt_info))
{
imm_use_iterator imm_iter;
use_operand_p use_p;
tree scalar_dest;
- gimple exit_phi;
+ gimple *exit_phi;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
stmt_vec_info. */
void
-vect_remove_stores (gimple first_stmt)
+vect_remove_stores (gimple *first_stmt)
{
- gimple next = first_stmt;
- gimple tmp;
+ gimple *next = first_stmt;
+ gimple *tmp;
gimple_stmt_iterator next_si;
while (next)
Create and initialize a new stmt_vec_info struct for STMT. */
stmt_vec_info
-new_stmt_vec_info (gimple stmt, loop_vec_info loop_vinfo,
+new_stmt_vec_info (gimple *stmt, loop_vec_info loop_vinfo,
bb_vec_info bb_vinfo)
{
stmt_vec_info res;
/* Free stmt vectorization related info. */
void
-free_stmt_vec_info (gimple stmt)
+free_stmt_vec_info (gimple *stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (patt_info)
{
gimple_seq seq = STMT_VINFO_PATTERN_DEF_SEQ (patt_info);
- gimple patt_stmt = STMT_VINFO_STMT (patt_info);
+ gimple *patt_stmt = STMT_VINFO_STMT (patt_info);
gimple_set_bb (patt_stmt, NULL);
tree lhs = gimple_get_lhs (patt_stmt);
if (TREE_CODE (lhs) == SSA_NAME)
gimple_stmt_iterator si;
for (si = gsi_start (seq); !gsi_end_p (si); gsi_next (&si))
{
- gimple seq_stmt = gsi_stmt (si);
+ gimple *seq_stmt = gsi_stmt (si);
gimple_set_bb (seq_stmt, NULL);
lhs = gimple_get_lhs (seq_stmt);
if (TREE_CODE (lhs) == SSA_NAME)
For now, operands defined outside the basic block are not supported. */
bool
-vect_is_simple_use (tree operand, gimple stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, gimple *def_stmt,
+vect_is_simple_use (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
+ bb_vec_info bb_vinfo, gimple **def_stmt,
tree *def, enum vect_def_type *dt)
{
*def_stmt = NULL;
scalar operand. */
bool
-vect_is_simple_use_1 (tree operand, gimple stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, gimple *def_stmt,
+vect_is_simple_use_1 (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
+ bb_vec_info bb_vinfo, gimple **def_stmt,
tree *def, enum vect_def_type *dt, tree *vectype)
{
if (!vect_is_simple_use (operand, stmt, loop_vinfo, bb_vinfo, def_stmt,
widening operation (short in the above example). */
bool
-supportable_widening_operation (enum tree_code code, gimple stmt,
+supportable_widening_operation (enum tree_code code, gimple *stmt,
tree vectype_out, tree vectype_in,
enum tree_code *code1, enum tree_code *code2,
int *multi_step_cvt,
by STMT is only directly used in the reduction statement. */
tree lhs = gimple_assign_lhs (stmt);
use_operand_p dummy;
- gimple use_stmt;
+ gimple *use_stmt;
stmt_vec_info use_stmt_info = NULL;
if (single_imm_use (lhs, &dummy, &use_stmt)
&& (use_stmt_info = vinfo_for_stmt (use_stmt))
{
unsigned int vf = 1;
enum internal_fn ifn;
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree t;
if (!is_gimple_call (stmt)
|| !gimple_call_internal_p (stmt))
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
continue;
switch (gimple_call_internal_fn (stmt))
if (lhs == NULL_TREE)
continue;
imm_use_iterator use_iter;
- gimple use_stmt;
+ gimple *use_stmt;
ns.simduid = DECL_UID (SSA_NAME_VAR (gimple_call_arg (stmt, 0)));
FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, lhs)
if (!is_gimple_debug (use_stmt))
/* If LOOP has been versioned during ifcvt, return the internal call
guarding it. */
-static gimple
+static gimple *
vect_loop_vectorized_call (struct loop *loop)
{
basic_block bb = loop_preheader_edge (loop)->src;
- gimple g;
+ gimple *g;
do
{
g = last_stmt (bb);
update any immediate uses of its LHS. */
static void
-fold_loop_vectorized_call (gimple g, tree value)
+fold_loop_vectorized_call (gimple *g, tree value)
{
tree lhs = gimple_call_lhs (g);
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (g);
update_call_from_tree (&gsi, value);
represented by LOOP_VINFO. LOOP_VECTORIZED_CALL is the internal
call guarding the loop which has been if-converted. */
static void
-set_uid_loop_bbs (loop_vec_info loop_vinfo, gimple loop_vectorized_call)
+set_uid_loop_bbs (loop_vec_info loop_vinfo, gimple *loop_vectorized_call)
{
tree arg = gimple_call_arg (loop_vectorized_call, 1);
basic_block *bbs;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
gimple_set_uid (phi, 0);
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, 0);
}
}
if (!dbg_cnt (vect_loop))
break;
- gimple loop_vectorized_call = vect_loop_vectorized_call (loop);
+ gimple *loop_vectorized_call = vect_loop_vectorized_call (loop);
if (loop_vectorized_call)
set_uid_loop_bbs (loop_vinfo, loop_vectorized_call);
if (LOCATION_LOCUS (vect_location) != UNKNOWN_LOCATION
loop = get_loop (cfun, i);
if (loop && loop->dont_vectorize)
{
- gimple g = vect_loop_vectorized_call (loop);
+ gimple *g = vect_loop_vectorized_call (loop);
if (g)
{
fold_loop_vectorized_call (g, boolean_false_node);
struct stmt_info_for_cost {
int count;
enum vect_cost_for_stmt kind;
- gimple stmt;
+ gimple *stmt;
int misalign;
};
static inline void
add_stmt_info_to_vec (stmt_vector_for_cost *stmt_cost_vec, int count,
- enum vect_cost_for_stmt kind, gimple stmt, int misalign)
+ enum vect_cost_for_stmt kind, gimple *stmt, int misalign)
{
stmt_info_for_cost si;
si.count = count;
/* Nodes that contain def-stmts of this node's statements' operands. */
vec<slp_tree> children;
/* A group of scalar stmts to be vectorized together. */
- vec<gimple> stmts;
+ vec<gimple *> stmts;
/* Load permutation relative to the stores, NULL if there is no
permutation. */
vec<unsigned> load_permutation;
/* Vectorized stmt/s. */
- vec<gimple> vec_stmts;
+ vec<gimple *> vec_stmts;
/* Number of vector stmts that are created to replace the group of scalar
stmts. It is calculated during the transformation phase as the number of
scalar elements in one scalar iteration (GROUP_SIZE) multiplied by VF
typedef struct _slp_oprnd_info
{
/* Def-stmts for the operands. */
- vec<gimple> def_stmts;
+ vec<gimple *> def_stmts;
/* Information about the first statement, its vector def-type, type, the
operand itself in case it's constant, and an indication if it's a pattern
stmt. */
/* Statements in the loop that have data references that are candidates for a
runtime (loop versioning) misalignment check. */
- vec<gimple> may_misalign_stmts;
+ vec<gimple *> may_misalign_stmts;
/* All interleaving chains of stores in the loop, represented by the first
stmt in the chain. */
- vec<gimple> grouped_stores;
+ vec<gimple *> grouped_stores;
/* All SLP instances in the loop. This is a subset of the set of GROUP_STORES
of the loop. */
unsigned slp_unrolling_factor;
/* Reduction cycles detected in the loop. Used in loop-aware SLP. */
- vec<gimple> reductions;
+ vec<gimple *> reductions;
/* All reduction chains in the loop, represented by the first
stmt in the chain. */
- vec<gimple> reduction_chains;
+ vec<gimple *> reduction_chains;
/* Hash table used to choose the best peeling option. */
hash_table<peel_info_hasher> *peeling_htab;
}
static inline bool
-nested_in_vect_loop_p (struct loop *loop, gimple stmt)
+nested_in_vect_loop_p (struct loop *loop, gimple *stmt)
{
return (loop->inner
&& (loop->inner == (gimple_bb (stmt))->loop_father));
basic_block bb;
/* All interleaving chains of stores in the basic block, represented by the
first stmt in the chain. */
- vec<gimple> grouped_stores;
+ vec<gimple *> grouped_stores;
/* All SLP instances in the basic block. This is a subset of the set of
GROUP_STORES of the basic block. */
bool in_pattern_p;
/* The stmt to which this info struct refers. */
- gimple stmt;
+ gimple *stmt;
/* The loop_vec_info with respect to which STMT is vectorized. */
loop_vec_info loop_vinfo;
tree vectype;
/* The vectorized version of the stmt. */
- gimple vectorized_stmt;
+ gimple *vectorized_stmt;
/** The following is relevant only for stmts that contain a non-scalar
related_stmt of the "pattern stmt" points back to this stmt (which is
the last stmt in the original sequence of stmts that constitutes the
pattern). */
- gimple related_stmt;
+ gimple *related_stmt;
/* Used to keep a sequence of def stmts of a pattern stmt if such exists. */
gimple_seq pattern_def_seq;
/* Interleaving and reduction chains info. */
/* First element in the group. */
- gimple first_element;
+ gimple *first_element;
/* Pointer to the next element in the group. */
- gimple next_element;
+ gimple *next_element;
/* For data-refs, in case two or more stmts share a data-ref, this is the
pointer to the previously detected stmt with the same dr. */
- gimple same_dr_stmt;
+ gimple *same_dr_stmt;
/* The size of the group. */
unsigned int size;
/* For stores, number of stores from this group seen. We vectorize the last
/* Return a stmt_vec_info corresponding to STMT. */
static inline stmt_vec_info
-vinfo_for_stmt (gimple stmt)
+vinfo_for_stmt (gimple *stmt)
{
unsigned int uid = gimple_uid (stmt);
if (uid == 0)
/* Set vectorizer information INFO for STMT. */
static inline void
-set_vinfo_for_stmt (gimple stmt, stmt_vec_info info)
+set_vinfo_for_stmt (gimple *stmt, stmt_vec_info info)
{
unsigned int uid = gimple_uid (stmt);
if (uid == 0)
/* Return the earlier statement between STMT1 and STMT2. */
-static inline gimple
-get_earlier_stmt (gimple stmt1, gimple stmt2)
+static inline gimple *
+get_earlier_stmt (gimple *stmt1, gimple *stmt2)
{
unsigned int uid1, uid2;
/* Return the later statement between STMT1 and STMT2. */
-static inline gimple
-get_later_stmt (gimple stmt1, gimple stmt2)
+static inline gimple *
+get_later_stmt (gimple *stmt1, gimple *stmt2)
{
unsigned int uid1, uid2;
static inline bool
is_pattern_stmt_p (stmt_vec_info stmt_info)
{
- gimple related_stmt;
+ gimple *related_stmt;
stmt_vec_info related_stmt_info;
related_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
extern unsigned int current_vector_size;
extern tree get_vectype_for_scalar_type (tree);
extern tree get_same_sized_vectype (tree, tree);
-extern bool vect_is_simple_use (tree, gimple, loop_vec_info,
- bb_vec_info, gimple *,
+extern bool vect_is_simple_use (tree, gimple *, loop_vec_info,
+ bb_vec_info, gimple **,
tree *, enum vect_def_type *);
-extern bool vect_is_simple_use_1 (tree, gimple, loop_vec_info,
- bb_vec_info, gimple *,
+extern bool vect_is_simple_use_1 (tree, gimple *, loop_vec_info,
+ bb_vec_info, gimple **,
tree *, enum vect_def_type *, tree *);
-extern bool supportable_widening_operation (enum tree_code, gimple, tree, tree,
- enum tree_code *, enum tree_code *,
- int *, vec<tree> *);
+extern bool supportable_widening_operation (enum tree_code, gimple *, tree,
+ tree, enum tree_code *,
+ enum tree_code *, int *,
+ vec<tree> *);
extern bool supportable_narrowing_operation (enum tree_code, tree, tree,
enum tree_code *,
int *, vec<tree> *);
-extern stmt_vec_info new_stmt_vec_info (gimple stmt, loop_vec_info,
+extern stmt_vec_info new_stmt_vec_info (gimple *stmt, loop_vec_info,
bb_vec_info);
-extern void free_stmt_vec_info (gimple stmt);
+extern void free_stmt_vec_info (gimple *stmt);
extern tree vectorizable_function (gcall *, tree, tree);
extern void vect_model_simple_cost (stmt_vec_info, int, enum vect_def_type *,
stmt_vector_for_cost *,
extern unsigned record_stmt_cost (stmt_vector_for_cost *, int,
enum vect_cost_for_stmt, stmt_vec_info,
int, enum vect_cost_model_location);
-extern void vect_finish_stmt_generation (gimple, gimple,
+extern void vect_finish_stmt_generation (gimple *, gimple *,
gimple_stmt_iterator *);
extern bool vect_mark_stmts_to_be_vectorized (loop_vec_info);
-extern tree vect_get_vec_def_for_operand (tree, gimple, tree *);
-extern tree vect_init_vector (gimple, tree, tree,
+extern tree vect_get_vec_def_for_operand (tree, gimple *, tree *);
+extern tree vect_init_vector (gimple *, tree, tree,
gimple_stmt_iterator *);
extern tree vect_get_vec_def_for_stmt_copy (enum vect_def_type, tree);
-extern bool vect_transform_stmt (gimple, gimple_stmt_iterator *,
+extern bool vect_transform_stmt (gimple *, gimple_stmt_iterator *,
bool *, slp_tree, slp_instance);
-extern void vect_remove_stores (gimple);
-extern bool vect_analyze_stmt (gimple, bool *, slp_tree);
-extern bool vectorizable_condition (gimple, gimple_stmt_iterator *, gimple *,
- tree, int, slp_tree);
+extern void vect_remove_stores (gimple *);
+extern bool vect_analyze_stmt (gimple *, bool *, slp_tree);
+extern bool vectorizable_condition (gimple *, gimple_stmt_iterator *,
+ gimple **, tree, int, slp_tree);
extern void vect_get_load_cost (struct data_reference *, int, bool,
unsigned int *, unsigned int *,
stmt_vector_for_cost *,
extern void vect_get_store_cost (struct data_reference *, int,
unsigned int *, stmt_vector_for_cost *);
extern bool vect_supportable_shift (enum tree_code, tree);
-extern void vect_get_vec_defs (tree, tree, gimple, vec<tree> *,
+extern void vect_get_vec_defs (tree, tree, gimple *, vec<tree> *,
vec<tree> *, slp_tree, int);
extern tree vect_gen_perm_mask_any (tree, const unsigned char *);
extern tree vect_gen_perm_mask_checked (tree, const unsigned char *);
extern bool vect_can_force_dr_alignment_p (const_tree, unsigned int);
extern enum dr_alignment_support vect_supportable_dr_alignment
(struct data_reference *, bool);
-extern tree vect_get_smallest_scalar_type (gimple, HOST_WIDE_INT *,
+extern tree vect_get_smallest_scalar_type (gimple *, HOST_WIDE_INT *,
HOST_WIDE_INT *);
extern bool vect_analyze_data_ref_dependences (loop_vec_info, int *);
extern bool vect_slp_analyze_data_ref_dependences (bb_vec_info);
extern bool vect_verify_datarefs_alignment (loop_vec_info, bb_vec_info);
extern bool vect_analyze_data_ref_accesses (loop_vec_info, bb_vec_info);
extern bool vect_prune_runtime_alias_test_list (loop_vec_info);
-extern tree vect_check_gather_scatter (gimple, loop_vec_info, tree *, tree *,
+extern tree vect_check_gather_scatter (gimple *, loop_vec_info, tree *, tree *,
int *);
extern bool vect_analyze_data_refs (loop_vec_info, bb_vec_info, int *,
unsigned *);
-extern tree vect_create_data_ref_ptr (gimple, tree, struct loop *, tree,
+extern tree vect_create_data_ref_ptr (gimple *, tree, struct loop *, tree,
tree *, gimple_stmt_iterator *,
- gimple *, bool, bool *,
+ gimple **, bool, bool *,
tree = NULL_TREE);
-extern tree bump_vector_ptr (tree, gimple, gimple_stmt_iterator *, gimple, tree);
+extern tree bump_vector_ptr (tree, gimple *, gimple_stmt_iterator *, gimple *,
+ tree);
extern tree vect_create_destination_var (tree, tree);
extern bool vect_grouped_store_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_store_lanes_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_grouped_load_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_load_lanes_supported (tree, unsigned HOST_WIDE_INT);
-extern void vect_permute_store_chain (vec<tree> ,unsigned int, gimple,
+extern void vect_permute_store_chain (vec<tree>, unsigned int, gimple *,
gimple_stmt_iterator *, vec<tree> *);
-extern tree vect_setup_realignment (gimple, gimple_stmt_iterator *, tree *,
+extern tree vect_setup_realignment (gimple *, gimple_stmt_iterator *, tree *,
enum dr_alignment_support, tree,
struct loop **);
-extern void vect_transform_grouped_load (gimple, vec<tree> , int,
+extern void vect_transform_grouped_load (gimple *, vec<tree> , int,
gimple_stmt_iterator *);
-extern void vect_record_grouped_load_vectors (gimple, vec<tree> );
+extern void vect_record_grouped_load_vectors (gimple *, vec<tree> );
extern tree vect_get_new_vect_var (tree, enum vect_var_kind, const char *);
-extern tree vect_create_addr_base_for_vector_ref (gimple, gimple_seq *,
+extern tree vect_create_addr_base_for_vector_ref (gimple *, gimple_seq *,
tree, struct loop *,
tree = NULL_TREE);
/* In tree-vect-loop.c. */
/* FORNOW: Used in tree-parloops.c. */
extern void destroy_loop_vec_info (loop_vec_info, bool);
-extern gimple vect_force_simple_reduction (loop_vec_info, gimple, bool, bool *,
- bool);
+extern gimple *vect_force_simple_reduction (loop_vec_info, gimple *, bool,
+ bool *, bool);
/* Drive for loop analysis stage. */
extern loop_vec_info vect_analyze_loop (struct loop *);
/* Drive for loop transformation stage. */
extern void vect_transform_loop (loop_vec_info);
extern loop_vec_info vect_analyze_loop_form (struct loop *);
-extern bool vectorizable_live_operation (gimple, gimple_stmt_iterator *,
- gimple *);
-extern bool vectorizable_reduction (gimple, gimple_stmt_iterator *, gimple *,
- slp_tree);
-extern bool vectorizable_induction (gimple, gimple_stmt_iterator *, gimple *);
-extern tree get_initial_def_for_reduction (gimple, tree, tree *);
+extern bool vectorizable_live_operation (gimple *, gimple_stmt_iterator *,
+ gimple **);
+extern bool vectorizable_reduction (gimple *, gimple_stmt_iterator *,
+ gimple **, slp_tree);
+extern bool vectorizable_induction (gimple *, gimple_stmt_iterator *,
+ gimple **);
+extern tree get_initial_def_for_reduction (gimple *, tree, tree *);
extern int vect_min_worthwhile_factor (enum tree_code);
extern int vect_get_known_peeling_cost (loop_vec_info, int, int *,
stmt_vector_for_cost *,
/* Pattern recognition functions.
Additional pattern recognition functions can (and will) be added
in the future. */
-typedef gimple (* vect_recog_func_ptr) (vec<gimple> *, tree *, tree *);
+typedef gimple *(* vect_recog_func_ptr) (vec<gimple *> *, tree *, tree *);
#define NUM_PATTERNS 13
void vect_pattern_recog (loop_vec_info, bb_vec_info);
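The vect_recog_func_ptr hunk shows the same rewrite landing inside a function-pointer typedef: both the return type and the vec<> element type gain an explicit `*`. A standalone mirror of that shape, hedged: std::vector stands in for GCC's vec<>, and all names here are invented:

#include <vector>

struct stmt { int code; };

/* Mirrors: typedef gimple *(* vect_recog_func_ptr) (vec<gimple *> *, ...)  */
typedef stmt *(*recog_fn) (std::vector<stmt *> *);

/* A recognizer that never matches; a null return plays the role of
   "no pattern recognized".  */
static stmt *recog_none (std::vector<stmt *> *) { return 0; }

int main ()
{
  recog_fn recog = recog_none;
  std::vector<stmt *> stmts;
  return recog (&stmts) == 0 ? 0 : 1;
}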
/* Return whether STMT has a constant rhs that is_overflow_infinity. */
static inline bool
-stmt_overflow_infinity (gimple stmt)
+stmt_overflow_infinity (gimple *stmt)
{
if (is_gimple_assign (stmt)
&& get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) ==
*STRICT_OVERFLOW_P. */
static bool
-gimple_assign_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
switch (get_gimple_rhs_class (code))
*STRICT_OVERFLOW_P. */
static bool
-gimple_call_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
tree arg0 = gimple_call_num_args (stmt) > 0 ?
gimple_call_arg (stmt, 0) : NULL_TREE;
*STRICT_OVERFLOW_P. */
static bool
-gimple_stmt_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
switch (gimple_code (stmt))
{
*STRICT_OVERFLOW_P. */
static bool
-gimple_assign_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_assign_nonzero_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
switch (get_gimple_rhs_class (code))
*STRICT_OVERFLOW_P. */
static bool
-gimple_stmt_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_stmt_nonzero_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
switch (gimple_code (stmt))
{
obtained so far. */
static bool
-vrp_stmt_computes_nonzero (gimple stmt, bool *strict_overflow_p)
+vrp_stmt_computes_nonzero (gimple *stmt, bool *strict_overflow_p)
{
if (gimple_stmt_nonzero_warnv_p (stmt, strict_overflow_p))
return true;
Store the result in *VR. */
static void
-extract_range_basic (value_range_t *vr, gimple stmt)
+extract_range_basic (value_range_t *vr, gimple *stmt)
{
bool sop = false;
tree type = gimple_expr_type (stmt);
tree op = gimple_assign_rhs1 (stmt);
if (TREE_CODE (op) == code && TREE_CODE (TREE_OPERAND (op, 0)) == SSA_NAME)
{
- gimple g = SSA_NAME_DEF_STMT (TREE_OPERAND (op, 0));
+ gimple *g = SSA_NAME_DEF_STMT (TREE_OPERAND (op, 0));
if (is_gimple_call (g) && gimple_call_internal_p (g))
{
enum tree_code subcode = ERROR_MARK;
static void
adjust_range_with_scev (value_range_t *vr, struct loop *loop,
- gimple stmt, tree var)
+ gimple *stmt, tree var)
{
tree init, step, chrec, tmin, tmax, min, max, type, tem;
enum ev_direction dir;
create a new SSA name N and return the assertion assignment
'N = ASSERT_EXPR <V, V OP W>'. */
-static gimple
+static gimple *
build_assert_expr_for (tree cond, tree v)
{
tree a;
point values. */
static inline bool
-fp_predicate (gimple stmt)
+fp_predicate (gimple *stmt)
{
GIMPLE_CHECK (stmt, GIMPLE_COND);
inferred. */
static bool
-infer_value_range (gimple stmt, tree op, enum tree_code *comp_code_p, tree *val_p)
+infer_value_range (gimple *stmt, tree op, tree_code *comp_code_p, tree *val_p)
{
*val_p = NULL_TREE;
*comp_code_p = ERROR_MARK;
&& TREE_CODE (val) == INTEGER_CST
&& TYPE_UNSIGNED (TREE_TYPE (val)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
tree cst2 = NULL_TREE, name2 = NULL_TREE, name3 = NULL_TREE;
/* Extract CST2 from the (optional) addition. */
&& TREE_CODE (val) == INTEGER_CST)
{
imm_use_iterator ui;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
{
if (!is_gimple_assign (use_stmt))
if (TREE_CODE_CLASS (comp_code) == tcc_comparison
&& TREE_CODE (val) == INTEGER_CST)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
tree name2 = NULL_TREE, names[2], cst2 = NULL_TREE;
tree val2 = NULL_TREE;
unsigned int prec = TYPE_PRECISION (TREE_TYPE (val));
&& (nprec > 1
|| TYPE_UNSIGNED (TREE_TYPE (val))))
{
- gimple def_stmt2 = SSA_NAME_DEF_STMT (name2);
+ gimple *def_stmt2 = SSA_NAME_DEF_STMT (name2);
if (gimple_assign_cast_p (def_stmt2))
{
names[1] = gimple_assign_rhs1 (def_stmt2);
register_edge_assert_for_1 (tree op, enum tree_code code,
edge e, gimple_stmt_iterator bsi)
{
- gimple op_def;
+ gimple *op_def;
tree val;
enum tree_code rhs_code;
if (((comp_code == EQ_EXPR && integer_onep (val))
|| (comp_code == NE_EXPR && integer_zerop (val))))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == BIT_AND_EXPR)
if (((comp_code == EQ_EXPR && integer_zerop (val))
|| (comp_code == NE_EXPR && integer_onep (val))))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
/* For BIT_IOR_EXPR only if NAME == 0 both operands have
necessarily zero value, or if type-precision is one. */
static void
find_assert_locations_1 (basic_block bb, sbitmap live)
{
- gimple last;
+ gimple *last;
last = last_stmt (bb);
for (gimple_stmt_iterator si = gsi_last_bb (bb); !gsi_end_p (si);
gsi_prev (&si))
{
- gimple stmt;
+ gimple *stmt;
tree op;
ssa_op_iter i;
if (comp_code == NE_EXPR && integer_zerop (value))
{
tree t = op;
- gimple def_stmt = SSA_NAME_DEF_STMT (t);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (t);
while (is_gimple_assign (def_stmt)
&& CONVERT_EXPR_CODE_P
process_assert_insertions_for (tree name, assert_locus *loc)
{
/* Build the comparison expression NAME_i COMP_CODE VAL. */
- gimple stmt;
+ gimple *stmt;
tree cond;
- gimple assert_stmt;
+ gimple *assert_stmt;
edge_iterator ei;
edge e;
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
struct walk_stmt_info wi;
if (!gimple_has_location (stmt)
|| is_gimple_debug (stmt))
in basic block COND_BB. */
static bool
-all_imm_uses_in_stmt_or_feed_cond (tree var, gimple stmt, basic_block cond_bb)
+all_imm_uses_in_stmt_or_feed_cond (tree var, gimple *stmt, basic_block cond_bb)
{
use_operand_p use_p, use2_p;
imm_use_iterator iter;
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
if (USE_STMT (use_p) != stmt)
{
- gimple use_stmt = USE_STMT (use_p), use_stmt2;
+ gimple *use_stmt = USE_STMT (use_p), *use_stmt2;
if (is_gimple_debug (use_stmt))
continue;
while (is_gimple_assign (use_stmt)
{
edge e = single_pred_edge (bb);
basic_block cond_bb = e->src;
- gimple stmt = last_stmt (cond_bb);
+ gimple *stmt = last_stmt (cond_bb);
tree cst;
if (stmt == NULL
return;
if (gimple_assign_rhs1 (stmt) != var)
{
- gimple stmt2;
+ gimple *stmt2;
if (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME)
return;
FOR_EACH_BB_FN (bb, cfun)
for (si = gsi_after_labels (bb), is_unreachable = -1; !gsi_end_p (si);)
{
- gimple stmt = gsi_stmt (si);
- gimple use_stmt;
+ gimple *stmt = gsi_stmt (si);
+ gimple *use_stmt;
if (is_gimple_assign (stmt)
&& gimple_assign_rhs_code (stmt) == ASSERT_EXPR)
/* Return true if STMT is interesting for VRP. */
static bool
-stmt_interesting_for_vrp (gimple stmt)
+stmt_interesting_for_vrp (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
{
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
/* If the statement is a control insn, then we do not
want to avoid simulating the statement once. Failure
/* If the definition may be simulated again we cannot follow
this SSA edge as the SSA propagator does not necessarily
re-visit the use. */
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (!gimple_nop_p (def_stmt)
&& prop_simulate_again_p (def_stmt))
return NULL_TREE;
the SSA name in *OUTPUT_P. */
static enum ssa_prop_result
-vrp_visit_assignment_or_call (gimple stmt, tree *output_p)
+vrp_visit_assignment_or_call (gimple *stmt, tree *output_p)
{
tree def, lhs;
ssa_op_iter iter;
FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (!is_gimple_assign (use_stmt))
continue;
enum tree_code rhs_code = gimple_assign_rhs_code (use_stmt);
appropriate. */
static tree
-vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, gimple stmt)
+vrp_evaluate_conditional (tree_code code, tree op0, tree op1, gimple *stmt)
{
bool sop;
tree ret;
If STMT produces a varying value, return SSA_PROP_VARYING. */
static enum ssa_prop_result
-vrp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
+vrp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
{
tree def;
ssa_op_iter iter;
/* Simplify boolean operations if the source is known
to be already a boolean. */
static bool
-simplify_truth_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_truth_ops_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree lhs, op0, op1;
modulo. */
static bool
-simplify_div_or_mod_using_ranges (gimple stmt)
+simplify_div_or_mod_using_ranges (gimple *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree val = NULL;
disjoint. Return true if we do simplify. */
static bool
-simplify_min_or_max_using_ranges (gimple stmt)
+simplify_min_or_max_using_ranges (gimple *stmt)
{
tree op0 = gimple_assign_rhs1 (stmt);
tree op1 = gimple_assign_rhs2 (stmt);
ABS_EXPR into a NEGATE_EXPR. */
static bool
-simplify_abs_using_ranges (gimple stmt)
+simplify_abs_using_ranges (gimple *stmt)
{
tree op = gimple_assign_rhs1 (stmt);
value_range_t *vr = get_value_range (op);
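Per its comment, simplify_abs_using_ranges turns an ABS_EXPR into a NEGATE_EXPR when the operand's value range proves the operand non-positive. A standalone sketch of the underlying identity (invented helper, not GCC code):

/* If range analysis proves x <= 0, then |x| == -x, so the ABS can be
   replaced by a plain NEGATE.  */
static long abs_via_range (long x, bool known_nonpositive)
{
  if (known_nonpositive)
    return -x;                  /* the NEGATE_EXPR replacement */
  return x < 0 ? -x : x;        /* the generic ABS */
}

int main ()
{
  return (abs_via_range (-5, true) == 5
          && abs_via_range (-5, false) == 5) ? 0 : 1;
}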
operation is redundant. */
static bool
-simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
{
tree op0 = gimple_assign_rhs1 (stmt);
tree op1 = gimple_assign_rhs2 (stmt);
if (TREE_CODE (op0) == SSA_NAME
&& TREE_CODE (op1) == INTEGER_CST)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op0);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op0);
tree innerop;
if (!is_gimple_assign (def_stmt)
/* Simplify an integral conversion from an SSA name in STMT. */
static bool
-simplify_conversion_using_ranges (gimple stmt)
+simplify_conversion_using_ranges (gimple *stmt)
{
tree innerop, middleop, finaltype;
- gimple def_stmt;
+ gimple *def_stmt;
value_range_t *innervr;
signop inner_sgn, middle_sgn, final_sgn;
unsigned inner_prec, middle_prec, final_prec;
/* Simplify a conversion from integral SSA name to float in STMT. */
static bool
-simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi,
+ gimple *stmt)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
value_range_t *vr = get_value_range (rhs1);
/* Simplify an internal fn call using ranges if possible. */
static bool
-simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
{
enum tree_code subcode;
bool is_ubsan = false;
|| (is_ubsan && ovf))
return false;
- gimple g;
+ gimple *g;
location_t loc = gimple_location (stmt);
if (is_ubsan)
g = gimple_build_assign (gimple_call_lhs (stmt), subcode, op0, op1);
static bool
simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_assign (stmt))
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
{
bool assignment_p = false;
tree val;
- gimple stmt = gsi_stmt (*si);
+ gimple *stmt = gsi_stmt (*si);
if (is_gimple_assign (stmt)
&& TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
for any overflow warnings. */
static tree
-simplify_stmt_for_jump_threading (gimple stmt, gimple within_stmt,
+simplify_stmt_for_jump_threading (gimple *stmt, gimple *within_stmt,
class avail_exprs_stack *avail_exprs_stack ATTRIBUTE_UNUSED)
{
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
point in compilation. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple last;
+ gimple *last;
/* If the generic jump threading code does not find this block
interesting, then there is nothing to do. */
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (is_gimple_call (stmt))
find_decls_types (gimple_call_fntype (stmt), fld);
/* Check as to whether EXPR refers to a store to vptr. */
static tree
-is_vptr_store (gimple stmt, tree expr, bool is_write)
+is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
if (is_write == true
&& gimple_assign_single_p (stmt)
tree base, rhs, expr_ptr, builtin_decl;
basic_block bb;
HOST_WIDE_INT size;
- gimple stmt, g;
+ gimple *stmt, *g;
gimple_seq seq;
location_t loc;
unsigned int align;
static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi), g;
+ gimple *stmt = gsi_stmt (*gsi), *g;
tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
unsigned int i, num = gimple_call_num_args (stmt), j;
static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
tree rhs, lhs;
bool instrumented = false;
/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin. */
static void
-replace_func_exit (gimple stmt)
+replace_func_exit (gimple *stmt)
{
tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
- gimple g = gimple_build_call (builtin_decl, 0);
+ gimple *g = gimple_build_call (builtin_decl, 0);
gimple_set_location (g, cfun->function_end_locus);
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
gsi_replace (&gsi, g, true);
location_t loc;
basic_block exit_bb;
gimple_stmt_iterator gsi;
- gimple stmt, g;
+ gimple *stmt, *g;
tree builtin_decl;
edge e;
edge_iterator ei;
gimple_stmt_iterator gsi;
bool fentry_exit_instrument = false;
bool func_exit_seen = false;
- auto_vec<gimple> tsan_func_exits;
+ auto_vec<gimple *> tsan_func_exits;
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt)
&& gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
fentry_exit_instrument |= instrument_gimple (&gsi);
}
unsigned int i;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
if (fentry_exit_instrument)
replace_func_exit (stmt);
instrument_func_entry (void)
{
tree ret_addr, builtin_decl;
- gimple g;
+ gimple *g;
gimple_seq seq = NULL;
builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
bool
ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
{
- gimple g;
+ gimple *g;
location_t loc = gimple_location (gsi_stmt (*gsi));
if (flag_sanitize_undefined_trap_on_error)
/* Create a callgraph edge for statement STMT. */
static void
-ubsan_create_edge (gimple stmt)
+ubsan_create_edge (gimple *stmt)
{
gcall *call_stmt = dyn_cast <gcall *> (stmt);
basic_block bb = gimple_bb (stmt);
bool
ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 3);
index = force_gimple_operand_gsi (&cond_insert_point, index,
true, NULL_TREE,
false, GSI_NEW_STMT);
- gimple g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
+ gimple *g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
gimple_set_location (g, loc);
gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
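The expansion above gives the shape of the bounds check: a GT_EXPR condition that branches to the error path exactly when the index exceeds the bound the front end passed. A trivial standalone mirror (invented names, not GCC code):

/* Mirror of the GT_EXPR condition built above: the check fires
   exactly when index > bound.  */
static bool bounds_check_fires (long index, long bound)
{
  return index > bound;
}

int main ()
{
  return (!bounds_check_fires (3, 3) && bounds_check_fires (4, 3)) ? 0 : 1;
}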
ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
{
gimple_stmt_iterator gsi = *gsip;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 3);
tree ptr = gimple_call_arg (stmt, 0);
basic_block cur_bb = gsi_bb (gsi);
- gimple g;
+ gimple *g;
if (!integer_zerop (align))
{
unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
bool
ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 4);
tree size = gimple_call_arg (stmt, 2);
tree ckind = gimple_call_arg (stmt, 3);
gimple_stmt_iterator gsi_orig = *gsi;
- gimple g;
+ gimple *g;
/* See if we can discard the check. */
if (TREE_CODE (size) != INTEGER_CST
ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
{
gimple_stmt_iterator gsi = *gsip;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 5);
tree op = gimple_call_arg (stmt, 0);
tree ckind_tree = gimple_call_arg (stmt, 4);
ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree);
tree type = TREE_TYPE (TREE_TYPE (ckind_tree));
- gimple g;
+ gimple *g;
basic_block fallthru_bb = NULL;
if (ckind == UBSAN_DOWNCAST_POINTER)
static void
instrument_null (gimple_stmt_iterator gsi, bool is_lhs)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
tree base = get_base_address (t);
const enum tree_code code = TREE_CODE (base);
static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree_code code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree lhstype = TREE_TYPE (lhs);
tree a, b;
- gimple g;
+ gimple *g;
/* If this is not a signed operation, don't instrument anything here.
Also punt on bit-fields. */
static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree rhs = gimple_assign_rhs1 (stmt);
tree type = TREE_TYPE (rhs);
tree minv = NULL_TREE, maxv = NULL_TREE;
tree lhs = gimple_assign_lhs (stmt);
tree ptype = build_pointer_type (TREE_TYPE (rhs));
tree atype = reference_alias_ptr_type (rhs);
- gimple g = gimple_build_assign (make_ssa_name (ptype),
+ gimple *g = gimple_build_assign (make_ssa_name (ptype),
build_fold_addr_expr (rhs));
gimple_set_location (g, loc);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
static void
instrument_nonnull_arg (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc[2];
/* infer_nonnull_range needs flag_delete_null_pointer_checks set,
while for nonnull sanitization it is clear. */
if (POINTER_TYPE_P (TREE_TYPE (arg))
&& infer_nonnull_range_by_attribute (stmt, arg))
{
- gimple g;
+ gimple *g;
if (!is_gimple_val (arg))
{
g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
basic_block then_bb, fallthru_bb;
*gsi = create_cond_insert_point (gsi, true, false, true,
&then_bb, &fallthru_bb);
- gimple g = gimple_build_cond (EQ_EXPR, arg,
+ gimple *g = gimple_build_cond (EQ_EXPR, arg,
build_zero_cst (TREE_TYPE (arg)),
NULL_TREE, NULL_TREE);
gimple_set_location (g, loc[0]);
static void
instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
tree type;
while (TREE_CODE (base) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (base);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (base);
if (gimple_assign_ssa_name_copy_p (def_stmt)
|| (gimple_assign_cast_p (def_stmt)
&& POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
tree sizet;
tree base_addr = base;
- gimple bos_stmt = NULL;
+ gimple *bos_stmt = NULL;
if (decl_p)
base_addr = build1 (ADDR_EXPR,
build_pointer_type (TREE_TYPE (base)), base);
&& TREE_CODE (index) == SSA_NAME
&& TREE_CODE (sizet) == INTEGER_CST)
{
- gimple def = SSA_NAME_DEF_STMT (index);
+ gimple *def = SSA_NAME_DEF_STMT (index);
if (is_gimple_assign (def)
&& gimple_assign_rhs_code (def) == BIT_AND_EXPR
&& TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
GSI_SAME_STMT);
tree ckind = build_int_cst (unsigned_char_type_node,
is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
- gimple g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
+ gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
ptr, t, sizet, ckind);
gimple_set_location (g, loc);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
{
gsi_next (&gsi);
histogram_value
gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
- enum hist_type type, gimple stmt, tree value)
+ enum hist_type type, gimple *stmt, tree value)
{
histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
hist->hvalue.value = value;
static int
histogram_eq (const void *x, const void *y)
{
- return ((const_histogram_value) x)->hvalue.stmt == (const_gimple) y;
+ return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
}
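The histogram_eq hunk is a reminder of why the old scheme needed a distinct const_gimple typedef at all: const applied to a pointer typedef constifies the pointer, not the pointee. With the indirection spelled out, a plain `const gimple *` does the job. A standalone illustration with an invented type:

struct node { int v; };
typedef node *node_p;          /* old-style pointer typedef */

int main ()
{
  node n = { 1 };
  const node_p a = &n;         /* node *const: the POINTER is const */
  a->v = 2;                    /* compiles: the pointee stays mutable */
  const node *b = &n;          /* here the pointee is const */
  /* b->v = 3;                    would be rejected by the compiler */
  return (a->v == 2 && b->v == 2) ? 0 : 1;
}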
/* Set histogram for STMT. */
static void
-set_histogram_value (struct function *fun, gimple stmt, histogram_value hist)
+set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
{
void **loc;
if (!hist && !VALUE_HISTOGRAMS (fun))
/* Get histogram list for STMT. */
histogram_value
-gimple_histogram_value (struct function *fun, gimple stmt)
+gimple_histogram_value (struct function *fun, gimple *stmt)
{
if (!VALUE_HISTOGRAMS (fun))
return NULL;
/* Add histogram for STMT. */
void
-gimple_add_histogram_value (struct function *fun, gimple stmt,
+gimple_add_histogram_value (struct function *fun, gimple *stmt,
histogram_value hist)
{
hist->hvalue.next = gimple_histogram_value (fun, stmt);
/* Remove histogram HIST from STMT's histogram list. */
void
-gimple_remove_histogram_value (struct function *fun, gimple stmt,
+gimple_remove_histogram_value (struct function *fun, gimple *stmt,
histogram_value hist)
{
histogram_value hist2 = gimple_histogram_value (fun, stmt);
/* Lookup histogram of type TYPE in the STMT. */
histogram_value
-gimple_histogram_value_of_type (struct function *fun, gimple stmt,
+gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
enum hist_type type)
{
histogram_value hist;
/* Dump information about HIST to DUMP_FILE. */
void
-stream_in_histogram_value (struct lto_input_block *ib, gimple stmt)
+stream_in_histogram_value (struct lto_input_block *ib, gimple *stmt)
{
enum hist_type type;
unsigned int ncounters = 0;
/* Dump all histograms attached to STMT to DUMP_FILE. */
void
-dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple stmt)
+dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
{
histogram_value hist;
for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
/* Remove all histograms associated with STMT. */
void
-gimple_remove_stmt_histograms (struct function *fun, gimple stmt)
+gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
{
histogram_value val;
while ((val = gimple_histogram_value (fun, stmt)) != NULL)
/* Duplicate all histograms associated with OSTMT to STMT. */
void
-gimple_duplicate_stmt_histograms (struct function *fun, gimple stmt,
- struct function *ofun, gimple ostmt)
+gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
+ struct function *ofun, gimple *ostmt)
{
histogram_value val;
for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
/* Move all histograms associated with OSTMT to STMT. */
void
-gimple_move_stmt_histograms (struct function *fun, gimple stmt, gimple ostmt)
+gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
{
histogram_value val = gimple_histogram_value (fun, ostmt);
if (val)
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
for (hist = gimple_histogram_value (cfun, stmt); hist;
hist = hist->hvalue.next)
somehow. */
static bool
-check_counter (gimple stmt, const char * name,
+check_counter (gimple *stmt, const char * name,
gcov_type *count, gcov_type *all, gcov_type bb_count)
{
if (*all != bb_count || *count > *all)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
histogram_value th = gimple_histogram_value (cfun, stmt);
if (!th)
continue;
gassign *stmt1, *stmt2;
gcond *stmt3;
tree tmp0, tmp1, tmp2;
- gimple bb1end, bb2end, bb3end;
+ gimple *bb1end, *bb2end, *bb3end;
basic_block bb, bb2, bb3, bb4;
tree optype, op1, op2;
edge e12, e13, e23, e24, e34;
gassign *stmt1, *stmt2, *stmt3;
gcond *stmt4;
tree tmp2, tmp3;
- gimple bb1end, bb2end, bb3end;
+ gimple *bb1end, *bb2end, *bb3end;
basic_block bb, bb2, bb3, bb4;
tree optype, op1, op2;
edge e12, e13, e23, e24, e34;
gcov_type count1, gcov_type count2, gcov_type all)
{
gassign *stmt1;
- gimple stmt2;
+ gimple *stmt2;
gcond *stmt3;
tree tmp1;
- gimple bb1end, bb2end = NULL, bb3end;
+ gimple *bb1end, *bb2end = NULL, *bb3end;
basic_block bb, bb2, bb3, bb4;
tree optype, op1, op2;
edge e12, e23 = 0, e24, e34, e14;
{
if (gimple_call_lhs (iretbnd_stmt))
{
- gimple copy;
+ gimple *copy;
gimple_set_vdef (iretbnd_stmt, NULL_TREE);
gimple_set_vuse (iretbnd_stmt, NULL_TREE);
}
void
-stringop_block_profile (gimple stmt, unsigned int *expected_align,
+stringop_block_profile (gimple *stmt, unsigned int *expected_align,
HOST_WIDE_INT *expected_size)
{
histogram_value histogram;
division/modulo optimization. */
static void
-gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
+gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
{
tree lhs, divisor, op0, type;
histogram_value hist;
indirect/virtual call optimization. */
static void
-gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
+gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
{
tree callee;
string operations. */
static void
-gimple_stringops_values_to_profile (gimple gs, histogram_values *values)
+gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
{
gcall *stmt;
tree blck_size;
them to list VALUES. */
static void
-gimple_values_to_profile (gimple stmt, histogram_values *values)
+gimple_values_to_profile (gimple *stmt, histogram_values *values)
{
gimple_divmod_values_to_profile (stmt, values);
gimple_stringops_values_to_profile (stmt, values);
struct
{
tree value; /* The value to profile. */
- gimple stmt; /* Insn containing the value. */
+ gimple *stmt; /* Insn containing the value. */
gcov_type *counters; /* Pointer to first counter. */
struct histogram_value_t *next; /* Linked list pointer. */
} hvalue;
extern bool gimple_value_profile_transformations (void);
histogram_value gimple_alloc_histogram_value (struct function *, enum hist_type,
- gimple stmt, tree);
-histogram_value gimple_histogram_value (struct function *, gimple);
-histogram_value gimple_histogram_value_of_type (struct function *, gimple,
+ gimple *stmt, tree);
+histogram_value gimple_histogram_value (struct function *, gimple *);
+histogram_value gimple_histogram_value_of_type (struct function *, gimple *,
enum hist_type);
-void gimple_add_histogram_value (struct function *, gimple, histogram_value);
-void dump_histograms_for_stmt (struct function *, FILE *, gimple);
-void gimple_remove_histogram_value (struct function *, gimple, histogram_value);
-void gimple_remove_stmt_histograms (struct function *, gimple);
-void gimple_duplicate_stmt_histograms (struct function *, gimple,
- struct function *, gimple);
-void gimple_move_stmt_histograms (struct function *, gimple, gimple);
+void gimple_add_histogram_value (struct function *, gimple *, histogram_value);
+void dump_histograms_for_stmt (struct function *, FILE *, gimple *);
+void gimple_remove_histogram_value (struct function *, gimple *,
+ histogram_value);
+void gimple_remove_stmt_histograms (struct function *, gimple *);
+void gimple_duplicate_stmt_histograms (struct function *, gimple *,
+ struct function *, gimple *);
+void gimple_move_stmt_histograms (struct function *, gimple *, gimple *);
void verify_histograms (void);
void free_histograms (void);
-void stringop_block_profile (gimple, unsigned int *, HOST_WIDE_INT *);
+void stringop_block_profile (gimple *, unsigned int *, HOST_WIDE_INT *);
gcall *gimple_ic (gcall *, struct cgraph_node *, int, gcov_type,
gcov_type);
bool check_ic_target (gcall *, struct cgraph_node *);
extern void gimple_gen_average_profiler (histogram_value, unsigned, unsigned);
extern void gimple_gen_ior_profiler (histogram_value, unsigned, unsigned);
extern void stream_out_histogram_value (struct output_block *, histogram_value);
-extern void stream_in_histogram_value (struct lto_input_block *, gimple);
+extern void stream_in_histogram_value (struct lto_input_block *, gimple *);
extern struct cgraph_node* find_func_by_profile_id (int func_id);
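The declarations above all operate on a per-statement list: a histogram_value records its owning statement (hvalue.stmt, now a gimple *) and chains to further records through hvalue.next, which is the chain the dump/remove/move functions walk. A standalone mirror of that intrusive-list shape (all names invented, not GCC code):

struct stmt { int id; };

struct hist
{
  stmt *owner;   /* plays the role of hvalue.stmt */
  hist *next;    /* plays the role of hvalue.next */
};

/* Count the records on HEAD's chain owned by S, the way the dump and
   remove routines walk hvalue.next.  */
static int count_for (hist *head, stmt *s)
{
  int n = 0;
  for (hist *h = head; h; h = h->next)
    if (h->owner == s)
      n++;
  return n;
}

int main ()
{
  stmt s = { 1 };
  hist h2 = { &s, 0 };
  hist h1 = { &s, &h2 };
  return count_for (&h1, &s) == 2 ? 0 : 1;
}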
call). */
static bool
-is_vtable_assignment_stmt (gimple stmt)
+is_vtable_assignment_stmt (gimple *stmt)
{
if (gimple_code (stmt) != GIMPLE_ASSIGN)
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple stmt2 = USE_STMT (use_p);
+ gimple *stmt2 = USE_STMT (use_p);
if (is_gimple_call (stmt2))
{
verify_bb_vtables (basic_block bb)
{
gimple_seq stmts;
- gimple stmt = NULL;
+ gimple *stmt = NULL;
gimple_stmt_iterator gsi_vtbl_assign;
gimple_stmt_iterator gsi_virtual_call;
/* Replace all uses of lhs with tmp0. */
found = false;
imm_use_iterator iterator;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iterator, lhs)
{
use_operand_p use_p;