+2020-05-07 Kewen Lin <linkw@gcc.gnu.org>
+
+	* tree-vect-stmts.c (vectorizable_load): Check alignment to avoid
+	redundant half-vector handling when there is no peeling for gaps.
+
2020-05-07  Giuliano Belinassi  <giuliano.belinassi@usp.br>

	* tree-ssa-operands.c (operands_scanner): New class.
{
tree ltype = vectype;
tree new_vtype = NULL_TREE;
+ unsigned HOST_WIDE_INT gap
+ = DR_GROUP_GAP (first_stmt_info);
+ unsigned int vect_align
+ = vect_known_alignment_in_bytes (first_dr_info);
+ unsigned int scalar_dr_size
+ = vect_get_scalar_dr_size (first_dr_info);
/* If there's no peeling for gaps but we have a gap
with slp loads then load the lower half of the
vector only.  See get_group_load_store_type for
when we apply this optimization.  */
if (slp
&& loop_vinfo
&& !LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)
- && DR_GROUP_GAP (first_stmt_info) != 0
- && known_eq (nunits,
- (group_size
- - DR_GROUP_GAP (first_stmt_info)) * 2)
- && known_eq (nunits, group_size))
+ && gap != 0
+ && known_eq (nunits, (group_size - gap) * 2)
+ && known_eq (nunits, group_size)
+ && gap >= (vect_align / scalar_dr_size))
{
tree half_vtype;
new_vtype
if (ltype != vectype
&& memory_access_type == VMAT_CONTIGUOUS_REVERSE)
{
- unsigned HOST_WIDE_INT gap
- = DR_GROUP_GAP (first_stmt_info);
- gap *= tree_to_uhwi (TYPE_SIZE_UNIT (elem_type));
- tree gapcst = build_int_cst (ref_type, gap);
+ unsigned HOST_WIDE_INT gap_offset
+ = gap * tree_to_uhwi (TYPE_SIZE_UNIT (elem_type));
+ tree gapcst = build_int_cst (ref_type, gap_offset);
offset = size_binop (PLUS_EXPR, offset, gapcst);
}
data_ref
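
The new gap >= (vect_align / scalar_dr_size) condition mirrors the test already used in get_group_load_store_type: when the known alignment covers the gap elements, reading past the used part of the group stays inside one aligned, accessible block, so the overrun cannot fault and falling back to a lower-half vector load is redundant work. Below is a minimal standalone sketch of that decision, not GCC code; the function name need_half_vector_load, the parameters vect_align_bytes and scalar_size_bytes, and the example values are made up for illustration.

/* Standalone sketch only; not GCC code.  Models the decision the patch
   adds to vectorizable_load: a group of group_size scalar elements with
   the trailing gap elements unused is loaded as one full vector of
   nunits elements unless the known alignment is too small to guarantee
   that reading the gap elements cannot fault.  */

#include <stdbool.h>
#include <stdio.h>

/* Return true when the lower-half vector load is still needed.  Mirrors
   the new condition in the patch: the half-vector path only matters when
   gap >= vect_align_bytes / scalar_size_bytes, i.e. when the overrun is
   not already covered by the known alignment.  All names are illustrative.  */
static bool
need_half_vector_load (unsigned nunits, unsigned group_size, unsigned gap,
                       unsigned vect_align_bytes, unsigned scalar_size_bytes)
{
  return (gap != 0
          && nunits == (group_size - gap) * 2
          && nunits == group_size
          && gap >= vect_align_bytes / scalar_size_bytes);
}

int
main (void)
{
  /* Example: a V16QI load, a group of 16 chars where the upper 8 are a gap.  */
  unsigned nunits = 16, group_size = 16, gap = 8, scalar_size = 1;

  /* Known 16-byte alignment: the full 16-byte load stays inside one
     aligned, accessible block, so the overrun cannot fault and the
     half-vector handling is redundant (8 < 16 / 1).  */
  printf ("align 16: need half-vector load = %d\n",
          need_half_vector_load (nunits, group_size, gap, 16, scalar_size));

  /* Known 8-byte alignment only: the 8 gap bytes may cross into an
     inaccessible page, so keep the lower-half load (8 >= 8 / 1).  */
  printf ("align 8:  need half-vector load = %d\n",
          need_half_vector_load (nunits, group_size, gap, 8, scalar_size));

  return 0;
}

In the 16-byte-aligned case the new condition skips the half-vector fallback that the unpatched code would have taken, which is the redundancy the ChangeLog entry refers to; in the 8-byte-aligned case the behavior is unchanged.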