+2015-10-06 Richard Biener <rguenther@suse.de>
+
+ * tree-vectorizer.h (vec_info): New base class for...
+ (_loop_vec_info): ... this and ...
+ (_bb_vec_info): ... this.
+ (vect_is_simple_use, vect_is_simple_use_1, new_stmt_vec_info,
+ vect_analyze_data_refs_alignment, vect_verify_datarefs_alignment,
+ vect_analyze_data_ref_accesses, vect_analyze_data_refs,
+ vect_schedule_slp, vect_analyze_slp, vect_pattern_recog,
+ vect_destroy_datarefs): Adjust interface to take a vec_info *
+ rather than both a loop_vec_info and a bb_vec_info argument.
+ * tree-vect-data-refs.c (vect_compute_data_refs_alignment,
+ vect_verify_datarefs_alignment, vect_enhance_data_refs_alignment,
+ vect_analyze_data_refs_alignment, vect_analyze_data_ref_accesses,
+ vect_analyze_data_refs, vect_create_data_ref_ptr): Adjust
+ accordingly.
+ * tree-vect-loop.c (new_loop_vec_info): Initialize base class.
+ (destroy_loop_vec_info, vect_analyze_loop_2,
+ vect_is_simple_reduction_1, get_initial_def_for_induction,
+ vect_create_epilog_for_reduction, vectorizable_reduction,
+ vectorizable_live_operation, vect_transform_loop): Adjust.
+ * tree-vect-patterns.c (type_conversion_p,
+ vect_recog_widen_mult_pattern, vect_recog_widen_shift_pattern,
+ vect_recog_rotate_pattern, vect_recog_vector_vector_shift_pattern,
+ vect_recog_divmod_pattern, vect_recog_mixed_size_cond_pattern,
+ check_bool_pattern, vect_recog_bool_pattern,
+ vect_mark_pattern_stmts, vect_pattern_recog): Likewise.
+ * tree-vect-slp.c (vect_get_and_check_slp_defs,
+ vect_build_slp_tree_1, vect_build_slp_tree, vect_analyze_slp_cost_1,
+ vect_analyze_slp_instance, vect_analyze_slp, destroy_bb_vec_info,
+ vect_slp_analyze_bb_1, vect_schedule_slp): Likewise.
+ (new_bb_vec_info): Initialize base class.
+ * tree-vect-stmts.c (record_stmt_cost, process_use,
+ vect_get_vec_def_for_operand, vect_finish_stmt_generation,
+ vectorizable_mask_load_store, vectorizable_call,
+ vectorizable_simd_clone_call, vectorizable_conversion,
+ vectorizable_assignment, vectorizable_shift,
+ vectorizable_operation, vectorizable_store,
+ vectorizable_load, vect_is_simple_cond, vectorizable_condition,
+ new_stmt_vec_info, vect_is_simple_use, vect_is_simple_use_1): Likewise.
+ * tree-vectorizer.c (vect_destroy_datarefs): Likewise.
+
2015-10-05 Kaz Kojima <kkojima@gcc.gnu.org>
PR c/65345
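The hunks that follow are mechanical: every routine that previously took the
pair (loop_vec_info, bb_vec_info), with exactly one of the two non-NULL, now
takes a single vec_info * and recovers the concrete kind via is_a/as_a/
dyn_cast. A minimal standalone sketch of the before/after shape, using
stand-in types only (this is not the GCC source; the real dispatch goes
through gcc/is-a.h):

  // A tagged base class replaces two mutually exclusive pointer parameters.
  // This mirrors the vect_schedule_slp hunk below, where the vectorization
  // factor comes from the loop info and is 1 for basic-block SLP.
  #include <cstdio>

  struct vec_info { enum { bb, loop } kind; };
  struct loop_info : vec_info { unsigned vectorization_factor; };
  struct bb_info : vec_info {};

  // Before: unsigned get_vf (loop_info *loop_vinfo, bb_info *bb_vinfo),
  // with exactly one argument non-NULL.  After: one pointer, one tag test.
  static unsigned
  get_vf (vec_info *vinfo)
  {
    if (vinfo->kind == vec_info::loop)
      return static_cast<loop_info *> (vinfo)->vectorization_factor;
    return 1;
  }

  int
  main ()
  {
    loop_info li;  li.kind = vec_info::loop;  li.vectorization_factor = 4;
    bb_info bi;    bi.kind = vec_info::bb;
    std::printf ("loop vf = %u, bb vf = %u\n", get_vf (&li), get_vf (&bi));
    return 0;
  }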
Return FALSE if a data reference is found that cannot be vectorized. */
static bool
-vect_compute_data_refs_alignment (loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo)
+vect_compute_data_refs_alignment (vec_info *vinfo)
{
- vec<data_reference_p> datarefs;
+ vec<data_reference_p> datarefs = vinfo->datarefs;
struct data_reference *dr;
unsigned int i;
- if (loop_vinfo)
- datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
- else
- datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
FOR_EACH_VEC_ELT (datarefs, i, dr)
if (STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr)))
&& !vect_compute_data_ref_alignment (dr))
{
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
{
/* Mark unsupported statement as unvectorizable. */
STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
handled with respect to alignment. */
bool
-vect_verify_datarefs_alignment (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_verify_datarefs_alignment (vec_info *vinfo)
{
- vec<data_reference_p> datarefs;
+ vec<data_reference_p> datarefs = vinfo->datarefs;
struct data_reference *dr;
enum dr_alignment_support supportable_dr_alignment;
unsigned int i;
- if (loop_vinfo)
- datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
- else
- datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
FOR_EACH_VEC_ELT (datarefs, i, dr)
{
gimple *stmt = DR_STMT (dr);
if (do_peeling && known_alignment_for_access_p (dr0) && npeel == 0)
{
- stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+ stat = vect_verify_datarefs_alignment (loop_vinfo);
if (!stat)
do_peeling = false;
else
Drop the body_cost_vec on the floor here. */
body_cost_vec.release ();
- stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+ stat = vect_verify_datarefs_alignment (loop_vinfo);
gcc_assert (stat);
return stat;
}
/* Peeling and versioning can't be done together at this time. */
gcc_assert (! (do_peeling && do_versioning));
- stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+ stat = vect_verify_datarefs_alignment (loop_vinfo);
gcc_assert (stat);
return stat;
}
/* This point is reached if neither peeling nor versioning is being done. */
gcc_assert (! (do_peeling || do_versioning));
- stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
+ stat = vect_verify_datarefs_alignment (loop_vinfo);
return stat;
}
Return FALSE if a data reference is found that cannot be vectorized. */
bool
-vect_analyze_data_refs_alignment (loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo)
+vect_analyze_data_refs_alignment (vec_info *vinfo)
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
/* Mark groups of data references with same alignment using
data dependence information. */
- if (loop_vinfo)
+ if (is_a <loop_vec_info> (vinfo))
{
- vec<ddr_p> ddrs = LOOP_VINFO_DDRS (loop_vinfo);
+ vec<ddr_p> ddrs = vinfo->ddrs;
struct data_dependence_relation *ddr;
unsigned int i;
FOR_EACH_VEC_ELT (ddrs, i, ddr)
- vect_find_same_alignment_drs (ddr, loop_vinfo);
+ vect_find_same_alignment_drs (ddr, as_a <loop_vec_info> (vinfo));
}
- if (!vect_compute_data_refs_alignment (loop_vinfo, bb_vinfo))
+ if (!vect_compute_data_refs_alignment (vinfo))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
FORNOW: handle only arrays and pointer accesses. */
bool
-vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_analyze_data_ref_accesses (vec_info *vinfo)
{
unsigned int i;
- vec<data_reference_p> datarefs;
+ vec<data_reference_p> datarefs = vinfo->datarefs;
struct data_reference *dr;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_analyze_data_ref_accesses ===\n");
- if (loop_vinfo)
- datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
- else
- datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
if (datarefs.is_empty ())
return true;
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"not vectorized: complicated access pattern.\n");
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
{
/* Mark the statement as not vectorizable. */
STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
*/
bool
-vect_analyze_data_refs (loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo,
- int *min_vf, unsigned *n_stmts)
+vect_analyze_data_refs (vec_info *vinfo, int *min_vf, unsigned *n_stmts)
{
struct loop *loop = NULL;
basic_block bb = NULL;
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_analyze_data_refs ===\n");
- if (loop_vinfo)
+ if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
{
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
}
else
{
+ bb_vec_info bb_vinfo = as_a <bb_vec_info> (vinfo);
gimple_stmt_iterator gsi;
bb = BB_VINFO_BB (bb_vinfo);
&& !TREE_THIS_VOLATILE (DR_REF (dr))
&& targetm.vectorize.builtin_scatter != NULL;
bool maybe_simd_lane_access
- = loop_vinfo && loop->simduid;
+ = is_a <loop_vec_info> (vinfo) && loop->simduid;
/* If target supports vector gather loads or scatter stores, or if
this might be a SIMD lane access, see if they can't be used. */
- if (loop_vinfo
+ if (is_a <loop_vec_info> (vinfo)
&& (maybe_gather || maybe_scatter || maybe_simd_lane_access)
&& !nested_in_vect_loop_p (loop, stmt))
{
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
return false;
"not vectorized: base addr of dr is a "
"constant\n");
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
if (gatherscatter != SG_NONE || simd_lane_access)
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
return false;
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
if (gatherscatter != SG_NONE || simd_lane_access)
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
if (gatherscatter != SG_NONE || simd_lane_access)
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
if (gatherscatter != SG_NONE || simd_lane_access)
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
if (gatherscatter != SG_NONE || simd_lane_access)
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
break;
if (gatherscatter != SG_NONE || simd_lane_access)
if (gatherscatter != SG_NONE)
{
tree off;
- if (!vect_check_gather_scatter (stmt, loop_vinfo, NULL, &off, NULL)
+ if (!vect_check_gather_scatter (stmt, as_a <loop_vec_info> (vinfo),
+ NULL, &off, NULL)
|| get_vectype_for_scalar_type (TREE_TYPE (off)) == NULL_TREE)
{
STMT_VINFO_DATA_REF (stmt_info) = NULL;
STMT_VINFO_GATHER_SCATTER_P (stmt_info) = gatherscatter;
}
- else if (loop_vinfo
+ else if (is_a <loop_vec_info> (vinfo)
&& TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
{
if (nested_in_vect_loop_p (loop, stmt))
avoids spending useless time in analyzing their dependence. */
if (i != datarefs.length ())
{
- gcc_assert (bb_vinfo != NULL);
+ gcc_assert (is_a <bb_vec_info> (vinfo));
for (unsigned j = i; j < datarefs.length (); ++j)
{
data_reference_p dr = datarefs[j];
aggr_ptr, loop, &incr_gsi, insert_after,
&indx_before_incr, &indx_after_incr);
incr = gsi_stmt (incr_gsi);
- set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+ set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
/* Copy the points-to information if it exists. */
if (DR_PTR_INFO (dr))
containing_loop, &incr_gsi, insert_after, &indx_before_incr,
&indx_after_incr);
incr = gsi_stmt (incr_gsi);
- set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+ set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
/* Copy the points-to information if it exists. */
if (DR_PTR_INFO (dr))
unsigned int i, nbbs;
res = (loop_vec_info) xcalloc (1, sizeof (struct _loop_vec_info));
+ res->kind = vec_info::loop;
LOOP_VINFO_LOOP (res) = loop;
bbs = get_loop_body (loop);
loop_vec_info inner_loop_vinfo =
STMT_VINFO_LOOP_VINFO (stmt_info);
gcc_assert (loop->inner == LOOP_VINFO_LOOP (inner_loop_vinfo));
- STMT_VINFO_LOOP_VINFO (stmt_info) = res;
+ stmt_info->vinfo = res;
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
loop_vec_info inner_loop_vinfo =
STMT_VINFO_LOOP_VINFO (stmt_info);
gcc_assert (loop->inner == LOOP_VINFO_LOOP (inner_loop_vinfo));
- STMT_VINFO_LOOP_VINFO (stmt_info) = res;
+ stmt_info->vinfo = res;
}
}
else
{
gimple *phi = gsi_stmt (si);
gimple_set_uid (phi, 0);
- set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res, NULL));
+ set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res));
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
gimple *stmt = gsi_stmt (si);
gimple_set_uid (stmt, 0);
- set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res, NULL));
+ set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res));
}
}
}
}
free (LOOP_VINFO_BBS (loop_vinfo));
- vect_destroy_datarefs (loop_vinfo, NULL);
+ vect_destroy_datarefs (loop_vinfo);
free_dependence_relations (LOOP_VINFO_DDRS (loop_vinfo));
LOOP_VINFO_LOOP_NEST (loop_vinfo).release ();
LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).release ();
FORNOW: Handle only simple, array references, which
alignment can be forced, and aligned pointer-references. */
- ok = vect_analyze_data_refs (loop_vinfo, NULL, &min_vf, &n_stmts);
+ ok = vect_analyze_data_refs (loop_vinfo, &min_vf, &n_stmts);
if (!ok)
{
if (dump_enabled_p ())
vect_analyze_scalar_cycles (loop_vinfo);
- vect_pattern_recog (loop_vinfo, NULL);
+ vect_pattern_recog (loop_vinfo);
vect_fixup_scalar_cycles_with_patterns (loop_vinfo);
/* Analyze the access patterns of the data-refs in the loop (consecutive,
complex, etc.). FORNOW: Only handle consecutive access pattern. */
- ok = vect_analyze_data_ref_accesses (loop_vinfo, NULL);
+ ok = vect_analyze_data_ref_accesses (loop_vinfo);
if (!ok)
{
if (dump_enabled_p ())
}
/* Check the SLP opportunities in the loop, analyze and build SLP trees. */
- ok = vect_analyze_slp (loop_vinfo, NULL, n_stmts);
+ ok = vect_analyze_slp (loop_vinfo, n_stmts);
if (!ok)
return false;
/* Analyze the alignment of the data-refs in the loop.
Fail if a data reference is found that cannot be vectorized. */
- ok = vect_analyze_data_refs_alignment (loop_vinfo, NULL);
+ ok = vect_analyze_data_refs_alignment (loop_vinfo);
if (!ok)
{
if (dump_enabled_p ())
gimple *negate_stmt = gimple_build_assign (negrhs, NEGATE_EXPR, rhs);
gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
set_vinfo_for_stmt (negate_stmt, new_stmt_vec_info (negate_stmt,
- loop_info, NULL));
+ loop_info));
gsi_insert_before (&gsi, negate_stmt, GSI_NEW_STMT);
gimple_assign_set_rhs2 (def_stmt, negrhs);
gimple_assign_set_rhs_code (def_stmt, PLUS_EXPR);
new_stmt);
gcc_assert (!new_bb);
set_vinfo_for_stmt (new_stmt,
- new_stmt_vec_info (new_stmt, loop_vinfo, NULL));
+ new_stmt_vec_info (new_stmt, loop_vinfo));
}
}
else
vec_dest = vect_get_new_vect_var (vectype, vect_simple_var, "vec_iv_");
induction_phi = create_phi_node (vec_dest, iv_loop->header);
set_vinfo_for_stmt (induction_phi,
- new_stmt_vec_info (induction_phi, loop_vinfo, NULL));
+ new_stmt_vec_info (induction_phi, loop_vinfo));
induc_def = PHI_RESULT (induction_phi);
/* Create the iv update inside the loop */
vec_def = make_ssa_name (vec_dest, new_stmt);
gimple_assign_set_lhs (new_stmt, vec_def);
gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
- set_vinfo_for_stmt (new_stmt, new_stmt_vec_info (new_stmt, loop_vinfo,
- NULL));
+ set_vinfo_for_stmt (new_stmt, new_stmt_vec_info (new_stmt, loop_vinfo));
/* Set the arguments of the phi node: */
add_phi_arg (induction_phi, vec_init, pe, UNKNOWN_LOCATION);
gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
}
set_vinfo_for_stmt (new_stmt,
- new_stmt_vec_info (new_stmt, loop_vinfo, NULL));
+ new_stmt_vec_info (new_stmt, loop_vinfo));
STMT_VINFO_RELATED_STMT (prev_stmt_vinfo) = new_stmt;
prev_stmt_vinfo = vinfo_for_stmt (new_stmt);
}
si = gsi_after_labels (bb);
gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
set_vinfo_for_stmt (new_stmt,
- new_stmt_vec_info (new_stmt, loop_vinfo, NULL));
+ new_stmt_vec_info (new_stmt, loop_vinfo));
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (new_stmt))
= STMT_VINFO_RELATED_STMT (vinfo_for_stmt (induction_phi));
}
{
tree new_def = copy_ssa_name (def);
phi = create_phi_node (new_def, exit_bb);
- set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, loop_vinfo, NULL));
+ set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, loop_vinfo));
if (j == 0)
new_phis.quick_push (phi);
else
SET_PHI_ARG_DEF (outer_phi, single_exit (loop)->dest_idx,
PHI_RESULT (phi));
set_vinfo_for_stmt (outer_phi, new_stmt_vec_info (outer_phi,
- loop_vinfo, NULL));
+ loop_vinfo));
inner_phis.quick_push (phi);
new_phis[i] = outer_phi;
prev_phi_info = vinfo_for_stmt (outer_phi);
SET_PHI_ARG_DEF (outer_phi, single_exit (loop)->dest_idx,
PHI_RESULT (phi));
set_vinfo_for_stmt (outer_phi, new_stmt_vec_info (outer_phi,
- loop_vinfo, NULL));
+ loop_vinfo));
STMT_VINFO_RELATED_STMT (prev_phi_info) = outer_phi;
prev_phi_info = vinfo_for_stmt (outer_phi);
}
if (nested_in_vect_loop)
{
set_vinfo_for_stmt (epilog_stmt,
- new_stmt_vec_info (epilog_stmt, loop_vinfo,
- NULL));
+ new_stmt_vec_info (epilog_stmt, loop_vinfo));
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (epilog_stmt)) =
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (new_phi));
/* Create vector phi node. */
vect_phi = create_phi_node (vec_initial_def, bb);
new_phi_vinfo = new_stmt_vec_info (vect_phi,
- loop_vec_info_for_loop (outer_loop), NULL);
+ loop_vec_info_for_loop (outer_loop));
set_vinfo_for_stmt (vect_phi, new_phi_vinfo);
/* Create vs0 - initial def of the double reduction phi. */
if (i == 0 && code == COND_EXPR)
continue;
- is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo, NULL,
+ is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo,
&def_stmt, &def, &dt, &tem);
if (!vectype_in)
vectype_in = tem;
}
}
- is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo, NULL,
+ is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo,
&def_stmt, &def, &dt, &tem);
if (!vectype_in)
vectype_in = tem;
operand. */
new_phi = create_phi_node (vec_dest, loop->header);
set_vinfo_for_stmt (new_phi,
- new_stmt_vec_info (new_phi, loop_vinfo,
- NULL));
+ new_stmt_vec_info (new_phi, loop_vinfo));
if (j == 0 || slp_node)
phis.quick_push (new_phi);
}
gimple *dummy_stmt;
tree dummy;
- vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo, NULL,
+ vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo,
&dummy_stmt, &dummy, &dt);
loop_vec_def0 = vect_get_vec_def_for_stmt_copy (dt,
loop_vec_def0);
vec_oprnds0[0] = loop_vec_def0;
if (op_type == ternary_op)
{
- vect_is_simple_use (op1, stmt, loop_vinfo, NULL, &dummy_stmt,
+ vect_is_simple_use (op1, stmt, loop_vinfo, &dummy_stmt,
&dummy, &dt);
loop_vec_def1 = vect_get_vec_def_for_stmt_copy (dt,
loop_vec_def1);
else
op = gimple_op (stmt, i + 1);
if (op
- && !vect_is_simple_use (op, stmt, loop_vinfo, NULL, &def_stmt, &def,
- &dt))
+ && !vect_is_simple_use (op, stmt, loop_vinfo, &def_stmt, &def, &dt))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
dump_printf_loc (MSG_NOTE, vect_location,
"=== scheduling SLP instances ===\n");
- vect_schedule_slp (loop_vinfo, NULL);
+ vect_schedule_slp (loop_vinfo);
}
/* Hybrid SLP stmts must be vectorized in addition to SLP. */
{
tree dummy;
gimple *dummy_gimple;
- loop_vec_info loop_vinfo;
stmt_vec_info stmt_vinfo;
tree type = TREE_TYPE (name);
tree oprnd0;
enum vect_def_type dt;
tree def;
- bb_vec_info bb_vinfo;
stmt_vinfo = vinfo_for_stmt (use_stmt);
- loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
- if (!vect_is_simple_use (name, use_stmt, loop_vinfo, bb_vinfo, def_stmt,
+ if (!vect_is_simple_use (name, use_stmt, stmt_vinfo->vinfo, def_stmt,
&def, &dt))
return false;
else
*promotion = false;
- if (!vect_is_simple_use (oprnd0, *def_stmt, loop_vinfo,
- bb_vinfo, &dummy_gimple, &dummy, &dt))
+ if (!vect_is_simple_use (oprnd0, *def_stmt, stmt_vinfo->vinfo,
+ &dummy_gimple, &dummy, &dt))
return false;
return true;
pattern_stmt = gimple_build_assign (var, WIDEN_MULT_EXPR, oprnd0, oprnd1);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo) = NULL;
/* If the original two operands have different sizes, we may need to convert
{
append_pattern_def_seq (stmt_vinfo, new_stmt);
stmt_vec_info new_stmt_info
- = new_stmt_vec_info (new_stmt, loop_vinfo, bb_vinfo);
+ = new_stmt_vec_info (new_stmt, stmt_vinfo->vinfo);
set_vinfo_for_stmt (new_stmt, new_stmt_info);
STMT_VINFO_VECTYPE (new_stmt_info) = vectype;
}
{
append_pattern_def_seq (stmt_vinfo, pattern_stmt);
stmt_vec_info pattern_stmt_info
- = new_stmt_vec_info (pattern_stmt, loop_vinfo, bb_vinfo);
+ = new_stmt_vec_info (pattern_stmt, stmt_vinfo->vinfo);
set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
STMT_VINFO_VECTYPE (pattern_stmt_info) = vecitype;
pattern_stmt = gimple_build_assign (vect_recog_temp_ssa_var (type, NULL),
if (wstmt)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
new_pattern_def_seq (stmt_vinfo, wstmt);
stmt_vec_info new_stmt_info
- = new_stmt_vec_info (wstmt, loop_vinfo, bb_vinfo);
+ = new_stmt_vec_info (wstmt, stmt_vinfo->vinfo);
set_vinfo_for_stmt (wstmt, new_stmt_info);
STMT_VINFO_VECTYPE (new_stmt_info) = vectype;
}
gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+ vec_info *vinfo = stmt_vinfo->vinfo;
enum vect_def_type dt;
optab optab1, optab2;
edge ext_def = NULL;
|| !TYPE_UNSIGNED (type))
return NULL;
- if (!vect_is_simple_use (oprnd1, last_stmt, loop_vinfo, bb_vinfo, &def_stmt,
- &def, &dt))
+ if (!vect_is_simple_use (oprnd1, last_stmt, vinfo, &def_stmt, &def, &dt))
return NULL;
if (dt != vect_internal_def
&& optab_handler (optab1, TYPE_MODE (vectype)) != CODE_FOR_nothing)
return NULL;
- if (bb_vinfo != NULL || dt != vect_internal_def)
+ if (is_a <bb_vec_info> (vinfo) || dt != vect_internal_def)
{
optab2 = optab_for_tree_code (rhs_code, vectype, optab_scalar);
if (optab2
|| !optab2
|| optab_handler (optab2, TYPE_MODE (vectype)) == CODE_FOR_nothing)
{
- if (bb_vinfo == NULL && dt == vect_internal_def)
+ if (! is_a <bb_vec_info> (vinfo) && dt == vect_internal_def)
return NULL;
optab1 = optab_for_tree_code (LSHIFT_EXPR, vectype, optab_scalar);
optab2 = optab_for_tree_code (RSHIFT_EXPR, vectype, optab_scalar);
if (dt == vect_external_def
&& TREE_CODE (oprnd1) == SSA_NAME
- && loop_vinfo)
+ && is_a <loop_vec_info> (vinfo))
{
- struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
+ struct loop *loop = as_a <loop_vec_info> (vinfo)->loop;
ext_def = loop_preheader_edge (loop);
if (!SSA_NAME_IS_DEFAULT_DEF (oprnd1))
{
}
else
{
- def_stmt_vinfo = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+ def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecstype;
append_pattern_def_seq (stmt_vinfo, def_stmt);
}
else
{
- def_stmt_vinfo = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+ def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecstype;
append_pattern_def_seq (stmt_vinfo, def_stmt);
gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+ vec_info *vinfo = stmt_vinfo->vinfo;
enum vect_def_type dt;
tree def;
!= TYPE_PRECISION (TREE_TYPE (oprnd0)))
return NULL;
- if (!vect_is_simple_use (oprnd1, last_stmt, loop_vinfo, bb_vinfo, &def_stmt,
+ if (!vect_is_simple_use (oprnd1, last_stmt, vinfo, &def_stmt,
&def, &dt))
return NULL;
gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+ vec_info *vinfo = stmt_vinfo->vinfo;
optab optab;
tree q;
int dummy_int, prec;
def_stmt = gimple_build_assign (var, COND_EXPR, cond,
build_int_cst (utype, -1),
build_int_cst (utype, 0));
- def_stmt_vinfo
- = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+ def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecutype;
append_pattern_def_seq (stmt_vinfo, def_stmt);
def_stmt = gimple_build_assign (var, RSHIFT_EXPR,
gimple_assign_lhs (def_stmt),
shift);
- def_stmt_vinfo
- = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+ def_stmt_vinfo = new_stmt_vec_info (def_stmt, vinfo);
set_vinfo_for_stmt (def_stmt, def_stmt_vinfo);
STMT_VINFO_VECTYPE (def_stmt_vinfo) = vecutype;
append_pattern_def_seq (stmt_vinfo, def_stmt);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
gimple *pattern_stmt, *def_stmt;
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+ vec_info *vinfo = stmt_vinfo->vinfo;
tree orig_type0 = NULL_TREE, orig_type1 = NULL_TREE;
gimple *def_stmt0 = NULL, *def_stmt1 = NULL;
bool promotion;
NOP_EXPR, gimple_assign_lhs (def_stmt));
new_pattern_def_seq (stmt_vinfo, def_stmt);
- def_stmt_info = new_stmt_vec_info (def_stmt, loop_vinfo, bb_vinfo);
+ def_stmt_info = new_stmt_vec_info (def_stmt, vinfo);
set_vinfo_for_stmt (def_stmt, def_stmt_info);
STMT_VINFO_VECTYPE (def_stmt_info) = vecitype;
*type_in = vecitype;
true if bool VAR can be optimized that way. */
static bool
-check_bool_pattern (tree var, loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+check_bool_pattern (tree var, vec_info *vinfo)
{
gimple *def_stmt;
enum vect_def_type dt;
tree def, rhs1;
enum tree_code rhs_code;
- if (!vect_is_simple_use (var, NULL, loop_vinfo, bb_vinfo, &def_stmt, &def,
+ if (!vect_is_simple_use (var, NULL, vinfo, &def_stmt, &def,
&dt))
return false;
switch (rhs_code)
{
case SSA_NAME:
- return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);
+ return check_bool_pattern (rhs1, vinfo);
CASE_CONVERT:
if ((TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
|| !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
&& TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE)
return false;
- return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);
+ return check_bool_pattern (rhs1, vinfo);
case BIT_NOT_EXPR:
- return check_bool_pattern (rhs1, loop_vinfo, bb_vinfo);
+ return check_bool_pattern (rhs1, vinfo);
case BIT_AND_EXPR:
case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
- if (!check_bool_pattern (rhs1, loop_vinfo, bb_vinfo))
+ if (!check_bool_pattern (rhs1, vinfo))
return false;
- return check_bool_pattern (gimple_assign_rhs2 (def_stmt), loop_vinfo,
- bb_vinfo);
+ return check_bool_pattern (gimple_assign_rhs2 (def_stmt), vinfo);
default:
if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
enum tree_code rhs_code;
tree var, lhs, rhs, vectype;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
+ vec_info *vinfo = stmt_vinfo->vinfo;
gimple *pattern_stmt;
if (!is_gimple_assign (last_stmt))
if (vectype == NULL_TREE)
return NULL;
- if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
+ if (!check_bool_pattern (var, vinfo))
return NULL;
rhs = adjust_bool_pattern (var, TREE_TYPE (lhs), NULL_TREE, stmts);
if (get_vectype_for_scalar_type (type) == NULL_TREE)
return NULL;
- if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
+ if (!check_bool_pattern (var, vinfo))
return NULL;
rhs = adjust_bool_pattern (var, type, NULL_TREE, stmts);
gcc_assert (vectype != NULL_TREE);
if (!VECTOR_MODE_P (TYPE_MODE (vectype)))
return NULL;
- if (!check_bool_pattern (var, loop_vinfo, bb_vinfo))
+ if (!check_bool_pattern (var, vinfo))
return NULL;
rhs = adjust_bool_pattern (var, TREE_TYPE (vectype), NULL_TREE, stmts);
rhs = rhs2;
}
pattern_stmt = gimple_build_assign (lhs, SSA_NAME, rhs);
- pattern_stmt_info = new_stmt_vec_info (pattern_stmt, loop_vinfo,
- bb_vinfo);
+ pattern_stmt_info = new_stmt_vec_info (pattern_stmt, vinfo);
set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
STMT_VINFO_DATA_REF (pattern_stmt_info)
= STMT_VINFO_DATA_REF (stmt_vinfo);
{
stmt_vec_info pattern_stmt_info, def_stmt_info;
stmt_vec_info orig_stmt_info = vinfo_for_stmt (orig_stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (orig_stmt_info);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (orig_stmt_info);
+ vec_info *vinfo = orig_stmt_info->vinfo;
gimple *def_stmt;
pattern_stmt_info = vinfo_for_stmt (pattern_stmt);
if (pattern_stmt_info == NULL)
{
- pattern_stmt_info = new_stmt_vec_info (pattern_stmt, loop_vinfo,
- bb_vinfo);
+ pattern_stmt_info = new_stmt_vec_info (pattern_stmt, vinfo);
set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
}
gimple_set_bb (pattern_stmt, gimple_bb (orig_stmt));
def_stmt_info = vinfo_for_stmt (def_stmt);
if (def_stmt_info == NULL)
{
- def_stmt_info = new_stmt_vec_info (def_stmt, loop_vinfo,
- bb_vinfo);
+ def_stmt_info = new_stmt_vec_info (def_stmt, vinfo);
set_vinfo_for_stmt (def_stmt, def_stmt_info);
}
gimple_set_bb (def_stmt, gimple_bb (orig_stmt));
be recorded in S3. */
void
-vect_pattern_recog (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_pattern_recog (vec_info *vinfo)
{
struct loop *loop;
basic_block *bbs;
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_pattern_recog ===\n");
- if (loop_vinfo)
+ if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
{
loop = LOOP_VINFO_LOOP (loop_vinfo);
bbs = LOOP_VINFO_BBS (loop_vinfo);
}
else
{
- bbs = &BB_VINFO_BB (bb_vinfo);
+ bbs = &as_a <bb_vec_info> (vinfo)->bb;
nbbs = 1;
}
basic_block bb = bbs[i];
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- if (bb_vinfo && (stmt = gsi_stmt (si))
+ if (is_a <bb_vec_info> (vinfo)
+ && (stmt = gsi_stmt (si))
&& vinfo_for_stmt (stmt)
&& !STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (stmt)))
continue;
operation return 1, if everything is ok return 0. */
static int
-vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_get_and_check_slp_defs (vec_info *vinfo,
gimple *stmt, unsigned stmt_num,
vec<slp_oprnd_info> *oprnds_info)
{
bool first = stmt_num == 0;
bool second = stmt_num == 1;
- if (loop_vinfo)
- loop = LOOP_VINFO_LOOP (loop_vinfo);
+ if (is_a <loop_vec_info> (vinfo))
+ loop = LOOP_VINFO_LOOP (as_a <loop_vec_info> (vinfo));
if (is_gimple_call (stmt))
{
oprnd_info = (*oprnds_info)[i];
- if (!vect_is_simple_use (oprnd, NULL, loop_vinfo, bb_vinfo, &def_stmt,
+ if (!vect_is_simple_use (oprnd, NULL, vinfo, &def_stmt,
&def, &dt))
{
if (dump_enabled_p ())
from the pattern. Check that all the stmts of the node are in the
pattern. */
if (def_stmt && gimple_bb (def_stmt)
- && ((loop && flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
- || (!loop && gimple_bb (def_stmt) == BB_VINFO_BB (bb_vinfo)
+ && ((is_a <loop_vec_info> (vinfo)
+ && flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
+ || (is_a <bb_vec_info> (vinfo)
+ && gimple_bb (def_stmt) == as_a <bb_vec_info> (vinfo)->bb
&& gimple_code (def_stmt) != GIMPLE_PHI))
&& vinfo_for_stmt (def_stmt)
&& STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (def_stmt))
carried out or the stmts will never be vectorized by SLP. */
static bool
-vect_build_slp_tree_1 (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_build_slp_tree_1 (vec_info *vinfo,
vec<gimple *> stmts, unsigned int group_size,
unsigned nops, unsigned int *max_nunits,
unsigned int vectorization_factor, bool *matches,
/* If populating the vector type requires unrolling then fail
before adjusting *max_nunits for basic-block vectorization. */
- if (bb_vinfo
+ if (is_a <bb_vec_info> (vinfo)
&& TYPE_VECTOR_SUBPARTS (vectype) > group_size)
{
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
if (*max_nunits < TYPE_VECTOR_SUBPARTS (vectype))
{
*max_nunits = TYPE_VECTOR_SUBPARTS (vectype);
- if (bb_vinfo)
+ if (is_a <bb_vec_info> (vinfo))
vectorization_factor = *max_nunits;
}
greater than the SLP group size. */
unsigned ncopies
= vectorization_factor / TYPE_VECTOR_SUBPARTS (vectype);
- if (loop_vinfo
+ if (is_a <loop_vec_info> (vinfo)
&& GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) == stmt
&& ((GROUP_SIZE (vinfo_for_stmt (stmt))
- GROUP_GAP (vinfo_for_stmt (stmt)))
was found. */
static bool
-vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_build_slp_tree (vec_info *vinfo,
slp_tree *node, unsigned int group_size,
unsigned int *max_nunits,
vec<slp_tree> *loads,
return false;
bool two_operators = false;
- if (!vect_build_slp_tree_1 (loop_vinfo, bb_vinfo,
+ if (!vect_build_slp_tree_1 (vinfo,
SLP_TREE_SCALAR_STMTS (*node), group_size, nops,
max_nunits, vectorization_factor, matches,
&two_operators))
slp_oprnd_info oprnd_info;
FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (*node), i, stmt)
{
- switch (vect_get_and_check_slp_defs (loop_vinfo, bb_vinfo,
- stmt, i, &oprnds_info))
+ switch (vect_get_and_check_slp_defs (vinfo, stmt, i, &oprnds_info))
{
case 0:
break;
return false;
}
- if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &child,
+ if (vect_build_slp_tree (vinfo, &child,
group_size, max_nunits, loads,
vectorization_factor, matches,
npermutes, &this_tree_size, max_tree_size))
fails (or we don't try permutation below). Ideally we'd
even compute a permutation that will end up with the maximum
SLP tree size... */
- if (bb_vinfo
+ if (is_a <bb_vec_info> (vinfo)
&& !matches[0]
/* ??? Rejecting patterns this way doesn't work. We'd have to
do extra work to cancel the pattern so the uses see the
dump_printf (MSG_NOTE, "\n");
/* And try again with scratch 'matches' ... */
bool *tem = XALLOCAVEC (bool, group_size);
- if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &child,
+ if (vect_build_slp_tree (vinfo, &child,
group_size, max_nunits, loads,
vectorization_factor,
tem, npermutes, &this_tree_size,
enum vect_def_type dt;
if (!op || op == lhs)
continue;
- if (vect_is_simple_use (op, NULL, STMT_VINFO_LOOP_VINFO (stmt_info),
- STMT_VINFO_BB_VINFO (stmt_info),
- &def_stmt, &def, &dt))
+ if (vect_is_simple_use (op, NULL, stmt_info->vinfo, &def_stmt, &def, &dt))
{
/* Without looking at the actual initializer a vector of
constants can be implemented as load from the constant pool.
Return FALSE if it's impossible to SLP any stmt in the loop. */
static bool
-vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
+vect_analyze_slp_instance (vec_info *vinfo,
gimple *stmt, unsigned max_tree_size)
{
slp_instance new_instance;
}
else
{
- gcc_assert (loop_vinfo);
+ gcc_assert (is_a <loop_vec_info> (vinfo));
vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
}
}
else
{
- gcc_assert (loop_vinfo);
+ gcc_assert (is_a <loop_vec_info> (vinfo));
vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
- group_size = LOOP_VINFO_REDUCTIONS (loop_vinfo).length ();
+ group_size = as_a <loop_vec_info> (vinfo)->reductions.length ();
}
if (!vectype)
}
nunits = TYPE_VECTOR_SUBPARTS (vectype);
- if (loop_vinfo)
- vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
+ if (is_a <loop_vec_info> (vinfo))
+ vectorization_factor = as_a <loop_vec_info> (vinfo)->vectorization_factor;
else
vectorization_factor = nunits;
/* Calculate the unrolling factor. */
unrolling_factor = least_common_multiple (nunits, group_size) / group_size;
- if (unrolling_factor != 1 && !loop_vinfo)
+ if (unrolling_factor != 1 && is_a <bb_vec_info> (vinfo))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
else
{
/* Collect reduction statements. */
- vec<gimple *> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
+ vec<gimple *> reductions = as_a <loop_vec_info> (vinfo)->reductions;
for (i = 0; reductions.iterate (i, &next); i++)
scalar_stmts.safe_push (next);
}
/* Build the tree for the SLP instance. */
bool *matches = XALLOCAVEC (bool, group_size);
unsigned npermutes = 0;
- if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &node, group_size,
+ if (vect_build_slp_tree (vinfo, &node, group_size,
&max_nunits, &loads,
vectorization_factor, matches, &npermutes, NULL,
max_tree_size))
unrolling_factor = least_common_multiple (max_nunits, group_size)
/ group_size;
- if (unrolling_factor != 1 && !loop_vinfo)
+ if (unrolling_factor != 1 && is_a <bb_vec_info> (vinfo))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
}
}
-
- if (loop_vinfo)
- LOOP_VINFO_SLP_INSTANCES (loop_vinfo).safe_push (new_instance);
- else
- BB_VINFO_SLP_INSTANCES (bb_vinfo).safe_push (new_instance);
+ vinfo->slp_instances.safe_push (new_instance);
if (dump_enabled_p ())
vect_print_slp_tree (MSG_NOTE, node);
trees of packed scalar stmts if SLP is possible. */
bool
-vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- unsigned max_tree_size)
+vect_analyze_slp (vec_info *vinfo, unsigned max_tree_size)
{
unsigned int i;
- vec<gimple *> grouped_stores;
- vec<gimple *> reductions = vNULL;
- vec<gimple *> reduc_chains = vNULL;
gimple *first_element;
bool ok = false;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "=== vect_analyze_slp ===\n");
- if (loop_vinfo)
- {
- grouped_stores = LOOP_VINFO_GROUPED_STORES (loop_vinfo);
- reduc_chains = LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo);
- reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
- }
- else
- grouped_stores = BB_VINFO_GROUPED_STORES (bb_vinfo);
-
/* Find SLP sequences starting from groups of grouped stores. */
- FOR_EACH_VEC_ELT (grouped_stores, i, first_element)
- if (vect_analyze_slp_instance (loop_vinfo, bb_vinfo, first_element,
- max_tree_size))
+ FOR_EACH_VEC_ELT (vinfo->grouped_stores, i, first_element)
+ if (vect_analyze_slp_instance (vinfo, first_element, max_tree_size))
ok = true;
- if (reduc_chains.length () > 0)
+ if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo))
{
- /* Find SLP sequences starting from reduction chains. */
- FOR_EACH_VEC_ELT (reduc_chains, i, first_element)
- if (vect_analyze_slp_instance (loop_vinfo, bb_vinfo, first_element,
- max_tree_size))
- ok = true;
- else
- return false;
+ if (loop_vinfo->reduction_chains.length () > 0)
+ {
+ /* Find SLP sequences starting from reduction chains. */
+ FOR_EACH_VEC_ELT (loop_vinfo->reduction_chains, i, first_element)
+ if (vect_analyze_slp_instance (vinfo, first_element,
+ max_tree_size))
+ ok = true;
+ else
+ return false;
- /* Don't try to vectorize SLP reductions if reduction chain was
- detected. */
- return ok;
- }
+ /* Don't try to vectorize SLP reductions if reduction chain was
+ detected. */
+ return ok;
+ }
- /* Find SLP sequences starting from groups of reductions. */
- if (reductions.length () > 1
- && vect_analyze_slp_instance (loop_vinfo, bb_vinfo, reductions[0],
- max_tree_size))
- ok = true;
+ /* Find SLP sequences starting from groups of reductions. */
+ if (loop_vinfo->reductions.length () > 1
+ && vect_analyze_slp_instance (vinfo, loop_vinfo->reductions[0],
+ max_tree_size))
+ ok = true;
+ }
return true;
}
gimple_stmt_iterator gsi;
res = (bb_vec_info) xcalloc (1, sizeof (struct _bb_vec_info));
+ res->kind = vec_info::bb;
BB_VINFO_BB (res) = bb;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, 0);
- set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, NULL, res));
+ set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res));
}
BB_VINFO_GROUPED_STORES (res).create (10);
free_stmt_vec_info (stmt);
}
- vect_destroy_datarefs (NULL, bb_vinfo);
+ vect_destroy_datarefs (bb_vinfo);
free_dependence_relations (BB_VINFO_DDRS (bb_vinfo));
BB_VINFO_GROUPED_STORES (bb_vinfo).release ();
slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
if (!bb_vinfo)
return NULL;
- if (!vect_analyze_data_refs (NULL, bb_vinfo, &min_vf, &n_stmts))
+ if (!vect_analyze_data_refs (bb_vinfo, &min_vf, &n_stmts))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
return NULL;
}
- if (!vect_analyze_data_ref_accesses (NULL, bb_vinfo))
+ if (!vect_analyze_data_ref_accesses (bb_vinfo))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
return NULL;
}
- vect_pattern_recog (NULL, bb_vinfo);
+ vect_pattern_recog (bb_vinfo);
- if (!vect_analyze_data_refs_alignment (NULL, bb_vinfo))
+ if (!vect_analyze_data_refs_alignment (bb_vinfo))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
/* Check the SLP opportunities in the basic block, analyze and build SLP
trees. */
- if (!vect_analyze_slp (NULL, bb_vinfo, n_stmts))
+ if (!vect_analyze_slp (bb_vinfo, n_stmts))
{
if (dump_enabled_p ())
{
return NULL;
}
- if (!vect_verify_datarefs_alignment (NULL, bb_vinfo))
+ if (!vect_verify_datarefs_alignment (bb_vinfo))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
/* Generate vector code for all SLP instances in the loop/basic block. */
bool
-vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_schedule_slp (vec_info *vinfo)
{
vec<slp_instance> slp_instances;
slp_instance instance;
unsigned int i, vf;
bool is_store = false;
- if (loop_vinfo)
- {
- slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
- vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
- }
+ slp_instances = vinfo->slp_instances;
+ if (is_a <loop_vec_info> (vinfo))
+ vf = as_a <loop_vec_info> (vinfo)->vectorization_factor;
else
- {
- slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
- vf = 1;
- }
+ vf = 1;
FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
??? For BB vectorization we can as well remove scalar
stmts starting from the SLP tree root if they have no
uses. */
- if (loop_vinfo)
+ if (is_a <loop_vec_info> (vinfo))
vect_remove_slp_scalar_calls (root);
for (j = 0; SLP_TREE_SCALAR_STMTS (root).iterate (j, &store)
/* Schedule all the SLP instances when the first SLP stmt is reached. */
if (STMT_SLP_TYPE (stmt_info))
{
- vect_schedule_slp (NULL, bb_vinfo);
+ vect_schedule_slp (bb_vinfo);
break;
}
}
}
else
- {
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
- void *target_cost_data;
-
- if (loop_vinfo)
- target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
- else
- target_cost_data = BB_VINFO_TARGET_COST_DATA (bb_vinfo);
-
- return add_stmt_cost (target_cost_data, count, kind, stmt_info,
- misalign, where);
- }
+ return add_stmt_cost (stmt_info->vinfo->target_cost_data,
+ count, kind, stmt_info, misalign, where);
}
/* Return a variable of type ELEM_TYPE[NELEMS]. */
if (!force && !exist_non_indexing_operands_for_use_p (use, stmt))
return true;
- if (!vect_is_simple_use (use, stmt, loop_vinfo, NULL, &def_stmt, &def, &dt))
+ if (!vect_is_simple_use (use, stmt, loop_vinfo, &def_stmt, &def, &dt))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
dump_printf (MSG_NOTE, "\n");
}
- is_simple_use = vect_is_simple_use (op, stmt, loop_vinfo, NULL,
+ is_simple_use = vect_is_simple_use (op, stmt, loop_vinfo,
&def_stmt, &def, &dt);
gcc_assert (is_simple_use);
if (dump_enabled_p ())
gimple_stmt_iterator *gsi)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
- loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
- bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
gcc_assert (gimple_code (stmt) != GIMPLE_LABEL);
}
gsi_insert_before (gsi, vec_stmt, GSI_SAME_STMT);
- set_vinfo_for_stmt (vec_stmt, new_stmt_vec_info (vec_stmt, loop_vinfo,
- bb_vinfo));
+ set_vinfo_for_stmt (vec_stmt, new_stmt_vec_info (vec_stmt, vinfo));
if (dump_enabled_p ())
{
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
gcc_assert (gather_decl);
- if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo, NULL,
+ if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo,
&def_stmt, &def, &gather_dt,
&gather_off_vectype))
{
if (TREE_CODE (mask) != SSA_NAME)
return false;
- if (!vect_is_simple_use (mask, stmt, loop_vinfo, NULL,
+ if (!vect_is_simple_use (mask, stmt, loop_vinfo,
&def_stmt, &def, &dt))
return false;
if (is_store)
{
tree rhs = gimple_call_arg (stmt, 3);
- if (!vect_is_simple_use (rhs, stmt, loop_vinfo, NULL,
+ if (!vect_is_simple_use (rhs, stmt, loop_vinfo,
&def_stmt, &def, &dt))
return false;
}
vec_mask = vect_get_vec_def_for_operand (mask, stmt, NULL);
else
{
- vect_is_simple_use (vec_mask, NULL, loop_vinfo, NULL,
+ vect_is_simple_use (vec_mask, NULL, loop_vinfo,
&def_stmt, &def, &dt);
vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
}
}
else
{
- vect_is_simple_use (vec_rhs, NULL, loop_vinfo, NULL, &def_stmt,
+ vect_is_simple_use (vec_rhs, NULL, loop_vinfo, &def_stmt,
&def, &dt);
vec_rhs = vect_get_vec_def_for_stmt_copy (dt, vec_rhs);
- vect_is_simple_use (vec_mask, NULL, loop_vinfo, NULL, &def_stmt,
+ vect_is_simple_use (vec_mask, NULL, loop_vinfo, &def_stmt,
&def, &dt);
vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
}
else
{
- vect_is_simple_use (vec_mask, NULL, loop_vinfo, NULL, &def_stmt,
+ vect_is_simple_use (vec_mask, NULL, loop_vinfo, &def_stmt,
&def, &dt);
vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
int nunits_out;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
tree fndecl, new_temp, def, rhs_type;
gimple *def_stmt;
enum vect_def_type dt[3]
if (!rhs_type)
rhs_type = TREE_TYPE (op);
- if (!vect_is_simple_use_1 (op, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (op, stmt, vinfo,
&def_stmt, &def, &dt[i], &opvectype))
{
if (dump_enabled_p ())
unsigned int nunits;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
tree fndecl, new_temp, def;
gimple *def_stmt;
thisarginfo.simd_lane_linear = false;
op = gimple_call_arg (stmt, i);
- if (!vect_is_simple_use_1 (op, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (op, stmt, vinfo,
&def_stmt, &def, &thisarginfo.dt,
&thisarginfo.vectype)
|| thisarginfo.dt == vect_uninitialized_def)
tree phi_res = copy_ssa_name (op);
gphi *new_phi = create_phi_node (phi_res, loop->header);
set_vinfo_for_stmt (new_phi,
- new_stmt_vec_info (new_phi, loop_vinfo,
- NULL));
+ new_stmt_vec_info (new_phi, loop_vinfo));
add_phi_arg (new_phi, arginfo[i].op,
loop_preheader_edge (loop), UNKNOWN_LOCATION);
enum tree_code code
gimple_stmt_iterator si = gsi_after_labels (loop->header);
gsi_insert_after (&si, new_stmt, GSI_NEW_STMT);
set_vinfo_for_stmt (new_stmt,
- new_stmt_vec_info (new_stmt, loop_vinfo,
- NULL));
+ new_stmt_vec_info (new_stmt, loop_vinfo));
add_phi_arg (new_phi, phi_arg, loop_latch_edge (loop),
UNKNOWN_LOCATION);
arginfo[i].op = phi_res;
vec<tree> vec_oprnds1 = vNULL;
tree vop0;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
int multi_step_cvt = 0;
vec<tree> vec_dsts = vNULL;
vec<tree> interm_types = vNULL;
}
/* Check the operands of the operation. */
- if (!vect_is_simple_use_1 (op0, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (op0, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype_in))
{
if (dump_enabled_p ())
/* For WIDEN_MULT_EXPR, if OP0 is a constant, use the type of
OP1. */
if (CONSTANT_CLASS_P (op0))
- ok = vect_is_simple_use_1 (op1, stmt, loop_vinfo, bb_vinfo,
+ ok = vect_is_simple_use_1 (op1, stmt, vinfo,
&def_stmt, &def, &dt[1], &vectype_in);
else
- ok = vect_is_simple_use (op1, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+ ok = vect_is_simple_use (op1, stmt, vinfo, &def_stmt,
&def, &dt[1]);
if (!ok)
vec<tree> vec_oprnds = vNULL;
tree vop;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info = NULL;
enum tree_code code;
gcc_assert (ncopies >= 1);
- if (!vect_is_simple_use_1 (op, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (op, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype_in))
{
if (dump_enabled_p ())
unsigned int k;
bool scalar_shift_arg = true;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
int vf;
if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
}
op0 = gimple_assign_rhs1 (stmt);
- if (!vect_is_simple_use_1 (op0, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (op0, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype))
{
if (dump_enabled_p ())
return false;
op1 = gimple_assign_rhs2 (stmt);
- if (!vect_is_simple_use_1 (op1, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+ if (!vect_is_simple_use_1 (op1, stmt, vinfo, &def_stmt,
&def, &dt[1], &op1_vectype))
{
if (dump_enabled_p ())
vec<tree> vec_oprnds2 = vNULL;
tree vop0, vop1, vop2;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
int vf;
if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
}
op0 = gimple_assign_rhs1 (stmt);
- if (!vect_is_simple_use_1 (op0, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (op0, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype))
{
if (dump_enabled_p ())
if (op_type == binary_op || op_type == ternary_op)
{
op1 = gimple_assign_rhs2 (stmt);
- if (!vect_is_simple_use (op1, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+ if (!vect_is_simple_use (op1, stmt, vinfo, &def_stmt,
&def, &dt[1]))
{
if (dump_enabled_p ())
if (op_type == ternary_op)
{
op2 = gimple_assign_rhs3 (stmt);
- if (!vect_is_simple_use (op2, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+ if (!vect_is_simple_use (op2, stmt, vinfo, &def_stmt,
&def, &dt[2]))
{
if (dump_enabled_p ())
bool slp = (slp_node != NULL);
unsigned int vec_num;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
+ vec_info *vinfo = stmt_info->vinfo;
tree aggr_type;
tree scatter_base = NULL_TREE, scatter_off = NULL_TREE;
tree scatter_off_vectype = NULL_TREE, scatter_decl = NULL_TREE;
}
op = gimple_assign_rhs1 (stmt);
- if (!vect_is_simple_use (op, stmt, loop_vinfo, bb_vinfo, &def_stmt,
+ if (!vect_is_simple_use (op, stmt, vinfo, &def_stmt,
&def, &dt))
{
if (dump_enabled_p ())
{
gcc_assert (gimple_assign_single_p (next_stmt));
op = gimple_assign_rhs1 (next_stmt);
- if (!vect_is_simple_use (op, next_stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use (op, next_stmt, vinfo,
&def_stmt, &def, &dt))
{
if (dump_enabled_p ())
scatter_decl = vect_check_gather_scatter (stmt, loop_vinfo, &scatter_base,
&scatter_off, &scatter_scale);
gcc_assert (scatter_decl);
- if (!vect_is_simple_use_1 (scatter_off, NULL, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (scatter_off, NULL, vinfo,
&def_stmt, &def, &scatter_idx_dt,
&scatter_off_vectype))
{
loop, &incr_gsi, insert_after,
&offvar, NULL);
incr = gsi_stmt (incr_gsi);
- set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+ set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
stride_step = force_gimple_operand (stride_step, &stmts, true, NULL_TREE);
if (stmts)
vec_oprnd = vec_oprnds[j];
else
{
- vect_is_simple_use (vec_oprnd, NULL, loop_vinfo,
- bb_vinfo, &def_stmt, &def, &dt);
+ vect_is_simple_use (vec_oprnd, NULL, vinfo,
+ &def_stmt, &def, &dt);
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, vec_oprnd);
}
}
for (i = 0; i < group_size; i++)
{
op = oprnds[i];
- vect_is_simple_use (op, NULL, loop_vinfo, bb_vinfo, &def_stmt,
+ vect_is_simple_use (op, NULL, vinfo, &def_stmt,
&def, &dt);
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, op);
dr_chain[i] = vec_oprnd;
tree gather_off_vectype = NULL_TREE, gather_decl = NULL_TREE;
int gather_scale = 1;
enum vect_def_type gather_dt = vect_unknown_def_type;
+ vec_info *vinfo = stmt_info->vinfo;
if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
return false;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
gcc_assert (gather_decl);
- if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (gather_off, NULL, vinfo,
&def_stmt, &def, &gather_dt,
&gather_off_vectype))
{
loop, &incr_gsi, insert_after,
&offvar, NULL);
incr = gsi_stmt (incr_gsi);
- set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo, NULL));
+ set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
stride_step = force_gimple_operand (unshare_expr (stride_step),
&stmts, true, NULL_TREE);
}
new_stmt = SSA_NAME_DEF_STMT (new_temp);
set_vinfo_for_stmt (new_stmt,
- new_stmt_vec_info (new_stmt, loop_vinfo,
- bb_vinfo));
+ new_stmt_vec_info (new_stmt, vinfo));
}
if (negative)
condition operands are supportable using vec_is_simple_use. */
static bool
-vect_is_simple_cond (tree cond, gimple *stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, tree *comp_vectype)
+vect_is_simple_cond (tree cond, gimple *stmt, vec_info *vinfo,
+ tree *comp_vectype)
{
tree lhs, rhs;
tree def;
if (TREE_CODE (lhs) == SSA_NAME)
{
gimple *lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
- if (!vect_is_simple_use_1 (lhs, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (lhs, stmt, vinfo,
&lhs_def_stmt, &def, &dt, &vectype1))
return false;
}
if (TREE_CODE (rhs) == SSA_NAME)
{
gimple *rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
- if (!vect_is_simple_use_1 (rhs, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use_1 (rhs, stmt, vinfo,
&rhs_def_stmt, &def, &dt, &vectype2))
return false;
}
then_clause = gimple_assign_rhs2 (stmt);
else_clause = gimple_assign_rhs3 (stmt);
- if (!vect_is_simple_cond (cond_expr, stmt, loop_vinfo, bb_vinfo,
- &comp_vectype)
+ if (!vect_is_simple_cond (cond_expr, stmt, stmt_info->vinfo, &comp_vectype)
|| !comp_vectype)
return false;
if (TREE_CODE (then_clause) == SSA_NAME)
{
gimple *then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
- if (!vect_is_simple_use (then_clause, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use (then_clause, stmt, stmt_info->vinfo,
&then_def_stmt, &def, &dt))
return false;
}
if (TREE_CODE (else_clause) == SSA_NAME)
{
gimple *else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
- if (!vect_is_simple_use (else_clause, stmt, loop_vinfo, bb_vinfo,
+ if (!vect_is_simple_use (else_clause, stmt, stmt_info->vinfo,
&else_def_stmt, &def, &dt))
return false;
}
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0),
stmt, NULL);
vect_is_simple_use (TREE_OPERAND (cond_expr, 0), stmt,
- loop_vinfo, NULL, >emp, &def, &dts[0]);
+ loop_vinfo, >emp, &def, &dts[0]);
vec_cond_rhs =
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 1),
stmt, NULL);
vect_is_simple_use (TREE_OPERAND (cond_expr, 1), stmt,
- loop_vinfo, NULL, >emp, &def, &dts[1]);
+ loop_vinfo, >emp, &def, &dts[1]);
if (reduc_index == 1)
vec_then_clause = reduc_def;
else
vec_then_clause = vect_get_vec_def_for_operand (then_clause,
stmt, NULL);
vect_is_simple_use (then_clause, stmt, loop_vinfo,
- NULL, >emp, &def, &dts[2]);
+ >emp, &def, &dts[2]);
}
if (reduc_index == 2)
vec_else_clause = reduc_def;
vec_else_clause = vect_get_vec_def_for_operand (else_clause,
stmt, NULL);
vect_is_simple_use (else_clause, stmt, loop_vinfo,
- NULL, >emp, &def, &dts[3]);
+ >emp, &def, &dts[3]);
}
}
}
Create and initialize a new stmt_vec_info struct for STMT. */
stmt_vec_info
-new_stmt_vec_info (gimple *stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo)
+new_stmt_vec_info (gimple *stmt, vec_info *vinfo)
{
stmt_vec_info res;
res = (stmt_vec_info) xcalloc (1, sizeof (struct _stmt_vec_info));
STMT_VINFO_TYPE (res) = undef_vec_info_type;
STMT_VINFO_STMT (res) = stmt;
- STMT_VINFO_LOOP_VINFO (res) = loop_vinfo;
- STMT_VINFO_BB_VINFO (res) = bb_vinfo;
+ res->vinfo = vinfo;
STMT_VINFO_RELEVANT (res) = vect_unused_in_scope;
STMT_VINFO_LIVE_P (res) = false;
STMT_VINFO_VECTYPE (res) = NULL;
For now, operands defined outside the basic block are not supported. */
bool
-vect_is_simple_use (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, gimple **def_stmt,
- tree *def, enum vect_def_type *dt)
+vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
+ gimple **def_stmt, tree *def, enum vect_def_type *dt)
{
*def_stmt = NULL;
*def = NULL_TREE;
}
basic_block bb = gimple_bb (*def_stmt);
- if ((loop_vinfo && !flow_bb_inside_loop_p (LOOP_VINFO_LOOP (loop_vinfo), bb))
- || (bb_vinfo
- && (bb != BB_VINFO_BB (bb_vinfo)
+ if ((is_a <loop_vec_info> (vinfo)
+ && !flow_bb_inside_loop_p (as_a <loop_vec_info> (vinfo)->loop, bb))
+ || (is_a <bb_vec_info> (vinfo)
+ && (bb != as_a <bb_vec_info> (vinfo)->bb
|| gimple_code (*def_stmt) == GIMPLE_PHI)))
*dt = vect_external_def;
else
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (*def_stmt);
- if (bb_vinfo && !STMT_VINFO_VECTORIZABLE (stmt_vinfo))
+ if (is_a <bb_vec_info> (vinfo) && !STMT_VINFO_VECTORIZABLE (stmt_vinfo))
*dt = vect_external_def;
else
*dt = STMT_VINFO_DEF_TYPE (stmt_vinfo);
scalar operand. */
bool
-vect_is_simple_use_1 (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, gimple **def_stmt,
+vect_is_simple_use_1 (tree operand, gimple *stmt, vec_info *vinfo,
+ gimple **def_stmt,
tree *def, enum vect_def_type *dt, tree *vectype)
{
- if (!vect_is_simple_use (operand, stmt, loop_vinfo, bb_vinfo, def_stmt,
- def, dt))
+ if (!vect_is_simple_use (operand, stmt, vinfo, def_stmt, def, dt))
return false;
/* Now get a vector type if the def is internal, otherwise supply
/* A helper function to free data refs. */
void
-vect_destroy_datarefs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
+vect_destroy_datarefs (vec_info *vinfo)
{
- vec<data_reference_p> datarefs;
struct data_reference *dr;
unsigned int i;
- if (loop_vinfo)
- datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
- else
- datarefs = BB_VINFO_DATAREFS (bb_vinfo);
-
- FOR_EACH_VEC_ELT (datarefs, i, dr)
+ FOR_EACH_VEC_ELT (vinfo->datarefs, i, dr)
if (dr->aux)
{
free (dr->aux);
dr->aux = NULL;
}
- free_data_refs (datarefs);
+ free_data_refs (vinfo->datarefs);
}
return (a->npeel == b->npeel);
}
+/* Vectorizer state common between loop and basic-block vectorization. */
+struct vec_info {
+ enum { bb, loop } kind;
+
+ /* All SLP instances. */
+ vec<slp_instance> slp_instances;
+
+ /* All data references. */
+ vec<data_reference_p> datarefs;
+
+ /* All data dependences. */
+ vec<ddr_p> ddrs;
+
+ /* All interleaving chains of stores, represented by the first
+ stmt in the chain. */
+ vec<gimple *> grouped_stores;
+
+ /* Cost data used by the target cost model. */
+ void *target_cost_data;
+};
+
+struct _loop_vec_info;
+struct _bb_vec_info;
+
+template<>
+template<>
+inline bool
+is_a_helper <_loop_vec_info *>::test (vec_info *i)
+{
+ return i->kind == vec_info::loop;
+}
+
+template<>
+template<>
+inline bool
+is_a_helper <_bb_vec_info *>::test (vec_info *i)
+{
+ return i->kind == vec_info::bb;
+}
+
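The two specializations above hook _loop_vec_info and _bb_vec_info into
gcc/is-a.h, which is what makes the is_a <...>, as_a <...> and dyn_cast <...>
calls throughout the earlier hunks work. Below is a self-contained stand-in
for that machinery, simplified to single-level templates (the real is-a.h
nests a member template, hence the template <> template <> spelling above);
the semantics assumed here are the usual ones: is_a tests, as_a asserts and
casts, dyn_cast returns NULL on mismatch:

  #include <cassert>
  #include <cstddef>

  // Simplified stand-in for gcc/is-a.h, not the real header.
  template <typename T> struct is_a_helper;

  template <typename T, typename U>
  inline bool
  is_a (U *p)                   // kind test, no conversion
  {
    return is_a_helper<T>::test (p);
  }

  template <typename T, typename U>
  inline T
  as_a (U *p)                   // cast that must succeed
  {
    assert (is_a<T> (p));
    return static_cast<T> (p);
  }

  template <typename T, typename U>
  inline T
  dyn_cast (U *p)               // cast that may fail, yielding NULL
  {
    return is_a<T> (p) ? static_cast<T> (p) : NULL;
  }

  struct vec_info { enum { bb, loop } kind; };
  struct _loop_vec_info : vec_info {};
  struct _bb_vec_info : vec_info {};

  template <>
  struct is_a_helper <_loop_vec_info *>
  {
    static bool test (vec_info *i) { return i->kind == vec_info::loop; }
  };

  template <>
  struct is_a_helper <_bb_vec_info *>
  {
    static bool test (vec_info *i) { return i->kind == vec_info::bb; }
  };

  int
  main ()
  {
    _loop_vec_info lv;  lv.kind = vec_info::loop;
    vec_info *v = &lv;
    assert (is_a <_loop_vec_info *> (v));
    assert (dyn_cast <_bb_vec_info *> (v) == NULL);
    _loop_vec_info *l = as_a <_loop_vec_info *> (v);
    (void) l;
    return 0;
  }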
/*-----------------------------------------------------------------*/
/* Info on vectorized loops. */
/*-----------------------------------------------------------------*/
-typedef struct _loop_vec_info {
+typedef struct _loop_vec_info : public vec_info {
/* The loop to which this info struct refers to. */
struct loop *loop;
/* The loop nest in which the data dependences are computed. */
vec<loop_p> loop_nest;
- /* All data references in the loop. */
- vec<data_reference_p> datarefs;
-
- /* All data dependences in the loop. */
- vec<ddr_p> ddrs;
-
/* Data Dependence Relations defining address ranges that are candidates
for a run-time aliasing check. */
vec<ddr_p> may_alias_ddrs;
runtime (loop versioning) misalignment check. */
vec<gimple *> may_misalign_stmts;
- /* All interleaving chains of stores in the loop, represented by the first
- stmt in the chain. */
- vec<gimple *> grouped_stores;
-
- /* All SLP instances in the loop. This is a subset of the set of GROUP_STORES
- of the loop. */
- vec<slp_instance> slp_instances;
-
/* The unrolling factor needed to SLP the loop. In case of that pure SLP is
applied to the loop, i.e., no unrolling is needed, this is 1. */
unsigned slp_unrolling_factor;
/* Cost of a single scalar iteration. */
int single_scalar_iteration_cost;
- /* Cost data used by the target cost model. */
- void *target_cost_data;
-
/* When we have grouped data accesses with gaps, we may introduce invalid
memory accesses. We peel the last iteration of the loop to prevent
this. */
&& (loop->inner == (gimple_bb (stmt))->loop_father));
}
-typedef struct _bb_vec_info {
-
+typedef struct _bb_vec_info : public vec_info
+{
basic_block bb;
- /* All interleaving chains of stores in the basic block, represented by the
- first stmt in the chain. */
- vec<gimple *> grouped_stores;
-
- /* All SLP instances in the basic block. This is a subset of the set of
- GROUP_STORES of the basic block. */
- vec<slp_instance> slp_instances;
-
- /* All data references in the basic block. */
- vec<data_reference_p> datarefs;
-
- /* All data dependences in the basic block. */
- vec<ddr_p> ddrs;
-
- /* Cost data used by the target cost model. */
- void *target_cost_data;
-
} *bb_vec_info;
#define BB_VINFO_BB(B) (B)->bb
/* The stmt to which this info struct refers to. */
gimple *stmt;
- /* The loop_vec_info with respect to which STMT is vectorized. */
- loop_vec_info loop_vinfo;
+ /* The vec_info with respect to which STMT is vectorized. */
+ vec_info *vinfo;
/* The vector type to be used for the LHS of this statement. */
tree vectype;
indicates whether the stmt needs to be vectorized. */
enum vect_relevant relevant;
- /* The bb_vec_info with respect to which STMT is vectorized. */
- bb_vec_info bb_vinfo;
-
/* Is this statement vectorizable or should it be skipped in (partial)
vectorization. */
bool vectorizable;
/* Access Functions. */
#define STMT_VINFO_TYPE(S) (S)->type
#define STMT_VINFO_STMT(S) (S)->stmt
-#define STMT_VINFO_LOOP_VINFO(S) (S)->loop_vinfo
-#define STMT_VINFO_BB_VINFO(S) (S)->bb_vinfo
+inline loop_vec_info
+STMT_VINFO_LOOP_VINFO (stmt_vec_info stmt_vinfo)
+{
+ if (loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (stmt_vinfo->vinfo))
+ return loop_vinfo;
+ return NULL;
+}
+inline bb_vec_info
+STMT_VINFO_BB_VINFO (stmt_vec_info stmt_vinfo)
+{
+ if (bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (stmt_vinfo->vinfo))
+ return bb_vinfo;
+ return NULL;
+}
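Design note on the two accessors above: the old STMT_VINFO_LOOP_VINFO and
STMT_VINFO_BB_VINFO were plain field macros reading one of two struct
members; with a single vinfo pointer the loop/bb distinction is dynamic, so
they become inline functions built on dyn_cast. For any given statement
exactly one of them returns non-NULL, preserving the old invariant that
exactly one of the two fields was set, so callers that test the result
against NULL keep working unchanged; the hunks earlier in this patch mostly
bypass the accessors and dispatch on stmt_info->vinfo directly.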
#define STMT_VINFO_RELEVANT(S) (S)->relevant
#define STMT_VINFO_LIVE_P(S) (S)->live
#define STMT_VINFO_VECTYPE(S) (S)->vectype
extern unsigned int current_vector_size;
extern tree get_vectype_for_scalar_type (tree);
extern tree get_same_sized_vectype (tree, tree);
-extern bool vect_is_simple_use (tree, gimple *, loop_vec_info,
- bb_vec_info, gimple **,
+extern bool vect_is_simple_use (tree, gimple *, vec_info *, gimple **,
tree *, enum vect_def_type *);
-extern bool vect_is_simple_use_1 (tree, gimple *, loop_vec_info,
- bb_vec_info, gimple **,
+extern bool vect_is_simple_use_1 (tree, gimple *, vec_info *, gimple **,
tree *, enum vect_def_type *, tree *);
extern bool supportable_widening_operation (enum tree_code, gimple *, tree,
tree, enum tree_code *,
extern bool supportable_narrowing_operation (enum tree_code, tree, tree,
enum tree_code *,
int *, vec<tree> *);
-extern stmt_vec_info new_stmt_vec_info (gimple *stmt, loop_vec_info,
- bb_vec_info);
+extern stmt_vec_info new_stmt_vec_info (gimple *stmt, vec_info *);
extern void free_stmt_vec_info (gimple *stmt);
extern tree vectorizable_function (gcall *, tree, tree);
extern void vect_model_simple_cost (stmt_vec_info, int, enum vect_def_type *,
extern bool vect_analyze_data_ref_dependences (loop_vec_info, int *);
extern bool vect_slp_analyze_data_ref_dependences (bb_vec_info);
extern bool vect_enhance_data_refs_alignment (loop_vec_info);
-extern bool vect_analyze_data_refs_alignment (loop_vec_info, bb_vec_info);
-extern bool vect_verify_datarefs_alignment (loop_vec_info, bb_vec_info);
-extern bool vect_analyze_data_ref_accesses (loop_vec_info, bb_vec_info);
+extern bool vect_analyze_data_refs_alignment (vec_info *);
+extern bool vect_verify_datarefs_alignment (vec_info *);
+extern bool vect_analyze_data_ref_accesses (vec_info *);
extern bool vect_prune_runtime_alias_test_list (loop_vec_info);
extern tree vect_check_gather_scatter (gimple *, loop_vec_info, tree *, tree *,
int *);
-extern bool vect_analyze_data_refs (loop_vec_info, bb_vec_info, int *,
- unsigned *);
+extern bool vect_analyze_data_refs (vec_info *, int *, unsigned *);
extern tree vect_create_data_ref_ptr (gimple *, tree, struct loop *, tree,
tree *, gimple_stmt_iterator *,
gimple **, bool, bool *,
slp_instance, bool);
extern bool vect_slp_analyze_operations (vec<slp_instance> slp_instances,
void *);
-extern bool vect_schedule_slp (loop_vec_info, bb_vec_info);
-extern bool vect_analyze_slp (loop_vec_info, bb_vec_info, unsigned);
+extern bool vect_schedule_slp (vec_info *);
+extern bool vect_analyze_slp (vec_info *, unsigned);
extern bool vect_make_slp_decision (loop_vec_info);
extern void vect_detect_hybrid_slp (loop_vec_info);
extern void vect_get_slp_defs (vec<tree> , slp_tree,
in the future. */
typedef gimple *(* vect_recog_func_ptr) (vec<gimple *> *, tree *, tree *);
#define NUM_PATTERNS 13
-void vect_pattern_recog (loop_vec_info, bb_vec_info);
+void vect_pattern_recog (vec_info *);
/* In tree-vectorizer.c. */
unsigned vectorize_loops (void);
-void vect_destroy_datarefs (loop_vec_info, bb_vec_info);
+void vect_destroy_datarefs (vec_info *);
#endif /* GCC_TREE_VECTORIZER_H */