+2015-05-22  Richard Biener  <rguenther@suse.de>
+
+	* tree-vect-loop.c (get_reduction_op): New function.
+	(vect_model_reduction_cost): Use it, add reduc_index parameter.
+	Make ready for BB reductions.
+	(vect_create_epilog_for_reduction): Use get_reduction_op.
+	(vectorizable_reduction): Init reduc_index to a valid value.
+	Adjust vect_model_reduction_cost call.
+	* tree-vect-slp.c (vect_get_constant_vectors): Use the proper
+	operand for reduction defaults.  Add SAD_EXPR support.
+	Assert we have a neutral op for SLP reductions.
+	* tree-vect-stmts.c (vect_mark_stmts_to_be_vectorized): When
+	walking pattern stmt ops only recurse to SSA names.
+
2015-05-22  Richard Biener  <rguenther@suse.de>

	* tree-vect-patterns.c (vect_recog_dot_prod_pattern): Replace
  return true;
}
+/* Return the reduction operand (with index REDUC_INDEX) of STMT.  */
+
+static tree
+get_reduction_op (gimple stmt, int reduc_index)
+{
+  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+    {
+    case GIMPLE_SINGLE_RHS:
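+      /* The whole reduction is one tree; check it has the three
+         operands we are about to index into.  */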
+      gcc_assert (TREE_OPERAND_LENGTH (gimple_assign_rhs1 (stmt))
+                  == ternary_op);
+      return TREE_OPERAND (gimple_assign_rhs1 (stmt), reduc_index);
+    case GIMPLE_UNARY_RHS:
+      return gimple_assign_rhs1 (stmt);
+    case GIMPLE_BINARY_RHS:
+      return (reduc_index
+              ? gimple_assign_rhs2 (stmt) : gimple_assign_rhs1 (stmt));
+    case GIMPLE_TERNARY_RHS:
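+      /* gimple_op 0 is the lhs; the rhs operands start at index 1.  */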
+      return gimple_op (stmt, reduc_index + 1);
+    default:
+      gcc_unreachable ();
+    }
+}
+
/* TODO: Close dependency between vect_model_*_cost and vectorizable_*
   functions.  Design better to avoid maintenance issues.  */
static bool
vect_model_reduction_cost (stmt_vec_info stmt_info, enum tree_code reduc_code,
-                           int ncopies)
+                           int ncopies, int reduc_index)
{
  int prologue_cost = 0, epilogue_cost = 0;
  enum tree_code code;
  tree reduction_op;
  machine_mode mode;
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
-  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  void *target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
+  struct loop *loop = NULL;
+  void *target_cost_data;
+
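+  /* For a basic-block reduction there is no enclosing loop; fetch the
+     cost data from the bb_vinfo instead.  */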
+  if (loop_vinfo)
+    {
+      loop = LOOP_VINFO_LOOP (loop_vinfo);
+      target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
+    }
+  else
+    target_cost_data
+      = BB_VINFO_TARGET_COST_DATA (STMT_VINFO_BB_VINFO (stmt_info));
  /* Cost of reduction op inside loop.  */
  unsigned inside_cost = add_stmt_cost (target_cost_data, ncopies, vector_stmt,
                                        stmt_info, 0, vect_body);
  stmt = STMT_VINFO_STMT (stmt_info);
-  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
-    {
-    case GIMPLE_SINGLE_RHS:
-      gcc_assert (TREE_OPERAND_LENGTH (gimple_assign_rhs1 (stmt)) == ternary_op);
-      reduction_op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
-      break;
-    case GIMPLE_UNARY_RHS:
-      reduction_op = gimple_assign_rhs1 (stmt);
-      break;
-    case GIMPLE_BINARY_RHS:
-      reduction_op = gimple_assign_rhs2 (stmt);
-      break;
-    case GIMPLE_TERNARY_RHS:
-      reduction_op = gimple_assign_rhs3 (stmt);
-      break;
-    default:
-      gcc_unreachable ();
-    }
+  reduction_op = get_reduction_op (stmt, reduc_index);
  vectype = get_vectype_for_scalar_type (TREE_TYPE (reduction_op));
  if (!vectype)
     We have a reduction operator that will reduce the vector in one statement.
     Also requires scalar extract.  */
-  if (!nested_in_vect_loop_p (loop, orig_stmt))
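+  /* For BB reductions LOOP is NULL and the reduction is never nested.  */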
+  if (!loop || !nested_in_vect_loop_p (loop, orig_stmt))
    {
      if (reduc_code != ERROR_MARK)
        {
      gcc_assert (!slp_node);
    }
-  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
-    {
-    case GIMPLE_SINGLE_RHS:
-      gcc_assert (TREE_OPERAND_LENGTH (gimple_assign_rhs1 (stmt))
-                  == ternary_op);
-      reduction_op = TREE_OPERAND (gimple_assign_rhs1 (stmt), reduc_index);
-      break;
-    case GIMPLE_UNARY_RHS:
-      reduction_op = gimple_assign_rhs1 (stmt);
-      break;
-    case GIMPLE_BINARY_RHS:
-      reduction_op = reduc_index ?
-                     gimple_assign_rhs2 (stmt) : gimple_assign_rhs1 (stmt);
-      break;
-    case GIMPLE_TERNARY_RHS:
-      reduction_op = gimple_op (stmt, reduc_index + 1);
-      break;
-    default:
-      gcc_unreachable ();
-    }
+  reduction_op = get_reduction_op (stmt, reduc_index);
  vectype = get_vectype_for_scalar_type (TREE_TYPE (reduction_op));
  gcc_assert (vectype);
  tree ops[3];
  bool nested_cycle = false, found_nested_cycle_def = false;
  gimple reduc_def_stmt = NULL;
-  /* The default is that the reduction variable is the last in statement.  */
-  int reduc_index = 2;
  bool double_reduc = false, dummy;
  basic_block def_bb;
  struct loop * def_stmt_loop, *outer_loop = NULL;
    default:
      gcc_unreachable ();
    }
+  /* The default is that the reduction variable is the last operand of
+     the statement; op_type - 1 selects it for any statement arity.  */
+  int reduc_index = op_type - 1;
  if (code == COND_EXPR && slp_node)
    return false;
  if (!vec_stmt) /* transformation not required.  */
    {
-      if (!vect_model_reduction_cost (stmt_info, epilog_reduc_code, ncopies))
+      if (!vect_model_reduction_cost (stmt_info, epilog_reduc_code, ncopies,
+                                      reduc_index))
        return false;
      STMT_VINFO_TYPE (stmt_info) = reduc_vec_info_type;
      return true;
  struct loop *loop;
  gimple_seq ctor_seq = NULL;
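+  /* Compute the vector type from OP up front, before OP is possibly
+     replaced by the stmt's reduction operand below.  */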
+  vector_type = get_vectype_for_scalar_type (TREE_TYPE (op));
+  nunits = TYPE_VECTOR_SUBPARTS (vector_type);
+
  if (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_reduction_def
      && reduc_index != -1)
    {
-      op_num = reduc_index - 1;
-      op = gimple_op (stmt, reduc_index);
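+      /* REDUC_INDEX counts rhs operands, while gimple_op puts the lhs
+         at index 0; hence the + 1.  */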
+      op_num = reduc_index;
+      op = gimple_op (stmt, op_num + 1);
      /* For additional copies (see the explanation of NUMBER_OF_COPIES below)
         we need either neutral operands or the original operands.  See
         get_initial_def_for_reduction() for details.  */
        {
        case WIDEN_SUM_EXPR:
        case DOT_PROD_EXPR:
+       case SAD_EXPR:
        case PLUS_EXPR:
        case MINUS_EXPR:
        case BIT_IOR_EXPR:
          break;
        default:
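+         /* An SLP reduction group must have a neutral op.  */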
+         gcc_assert (!GROUP_FIRST_ELEMENT (stmt_vinfo));
          neutral_op = NULL;
        }
    }
  else
    constant_p = false;
-  vector_type = get_vectype_for_scalar_type (TREE_TYPE (op));
-  gcc_assert (vector_type);
-  nunits = TYPE_VECTOR_SUBPARTS (vector_type);
-
  /* NUMBER_OF_COPIES is the number of times we need to use the same values in
     created vectors.  It is greater than 1 if unrolling is performed.