From: Richard Biener
Date: Thu, 5 Nov 2020 09:16:53 +0000 (+0100)
Subject: Fix SLP vectorization of stores from boolean vectors
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=403f0dac0cb24184631e175836766a3a1da165a5;p=gcc.git

Fix SLP vectorization of stores from boolean vectors

The following fixes SLP vectorization of stores that were pattern
recognized.  Since in SLP vectorization pattern analysis happens after
dataref group analysis we have to adjust the groups with the pattern
stmts.  This has some effects down the pipeline and exposes cases where
we looked at the wrong pattern/non-pattern stmts.

2020-11-05  Richard Biener

	* tree-vect-data-refs.c (vect_slp_analyze_node_dependences):
	Use the original stmts.
	(vect_slp_analyze_node_alignment): Use the pattern stmt.
	* tree-vect-slp.c (vect_fixup_store_groups_with_patterns):
	New function.
	(vect_slp_analyze_bb_1): Call it.

	* gcc.dg/vect/bb-slp-69.c: New testcase.
---

diff --git a/gcc/testsuite/gcc.dg/vect/bb-slp-69.c b/gcc/testsuite/gcc.dg/vect/bb-slp-69.c
new file mode 100644
index 00000000000..ca72a6804b7
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/vect/bb-slp-69.c
@@ -0,0 +1,45 @@
+/* { dg-do compile } */
+/* { dg-require-effective-target vect_int } */
+
+_Bool arr[16];
+
+void foo(char *q)
+{
+  char *p = __builtin_assume_aligned (q, 16);
+  _Bool b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15;
+  b0 = p[0] != 0;
+  b1 = p[1] != 0;
+  b2 = p[2] != 0;
+  b3 = p[3] != 0;
+  b4 = p[4] != 0;
+  b5 = p[5] != 0;
+  b6 = p[6] != 0;
+  b7 = p[7] != 0;
+  b8 = p[8] != 0;
+  b9 = p[9] != 0;
+  b10 = p[10] != 0;
+  b11 = p[11] != 0;
+  b12 = p[12] != 0;
+  b13 = p[13] != 0;
+  b14 = p[14] != 0;
+  b15 = p[15] != 0;
+  arr[0] = b0;
+  arr[1] = b1;
+  arr[2] = b2;
+  arr[3] = b3;
+  arr[4] = b4;
+  arr[5] = b5;
+  arr[6] = b6;
+  arr[7] = b7;
+  arr[8] = b8;
+  arr[9] = b9;
+  arr[10] = b10;
+  arr[11] = b11;
+  arr[12] = b12;
+  arr[13] = b13;
+  arr[14] = b14;
+  arr[15] = b15;
+}
+
+/* { dg-final { scan-tree-dump "transform load" "slp2" } } */
+/* { dg-final { scan-tree-dump "optimized: basic block" "slp2" } } */
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index fd14b480dbf..8afd3044461 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -688,7 +688,8 @@ vect_slp_analyze_node_dependences (vec_info *vinfo, slp_tree node,
   stmt_vec_info last_access_info = vect_find_last_scalar_stmt_in_slp (node);
   for (unsigned k = 0; k < SLP_TREE_SCALAR_STMTS (node).length (); ++k)
     {
-      stmt_vec_info access_info = SLP_TREE_SCALAR_STMTS (node)[k];
+      stmt_vec_info access_info
+        = vect_orig_stmt (SLP_TREE_SCALAR_STMTS (node)[k]);
       if (access_info == last_access_info)
         continue;
       data_reference *dr_a = STMT_VINFO_DATA_REF (access_info);
@@ -759,7 +760,8 @@ vect_slp_analyze_node_dependences (vec_info *vinfo, slp_tree node,
         = vect_find_first_scalar_stmt_in_slp (node);
       for (unsigned k = 0; k < SLP_TREE_SCALAR_STMTS (node).length (); ++k)
         {
-          stmt_vec_info access_info = SLP_TREE_SCALAR_STMTS (node)[k];
+          stmt_vec_info access_info
+            = vect_orig_stmt (SLP_TREE_SCALAR_STMTS (node)[k]);
           if (access_info == first_access_info)
             continue;
           data_reference *dr_a = STMT_VINFO_DATA_REF (access_info);
@@ -2444,7 +2446,8 @@ vect_slp_analyze_node_alignment (vec_info *vinfo, slp_tree node)
 
   /* For creating the data-ref pointer we need alignment of the
      first element as well.  */
-  first_stmt_info = vect_find_first_scalar_stmt_in_slp (node);
+  first_stmt_info
+    = vect_stmt_to_vectorize (vect_find_first_scalar_stmt_in_slp (node));
   if (first_stmt_info != SLP_TREE_SCALAR_STMTS (node)[0])
     {
       first_dr_info = STMT_VINFO_DR_INFO (first_stmt_info);
diff --git a/gcc/tree-vect-slp.c b/gcc/tree-vect-slp.c
index 016883a2655..9c63983b625 100644
--- a/gcc/tree-vect-slp.c
+++ b/gcc/tree-vect-slp.c
@@ -4035,6 +4035,48 @@ vect_slp_check_for_constructors (bb_vec_info bb_vinfo)
     }
 }
 
+/* Walk the grouped store chains and replace entries with their
+   pattern variant if any.  */
+
+static void
+vect_fixup_store_groups_with_patterns (vec_info *vinfo)
+{
+  stmt_vec_info first_element;
+  unsigned i;
+
+  FOR_EACH_VEC_ELT (vinfo->grouped_stores, i, first_element)
+    {
+      /* We also have CTORs in this array.  */
+      if (!STMT_VINFO_GROUPED_ACCESS (first_element))
+        continue;
+      if (STMT_VINFO_IN_PATTERN_P (first_element))
+        {
+          stmt_vec_info orig = first_element;
+          first_element = STMT_VINFO_RELATED_STMT (first_element);
+          DR_GROUP_FIRST_ELEMENT (first_element) = first_element;
+          DR_GROUP_SIZE (first_element) = DR_GROUP_SIZE (orig);
+          DR_GROUP_GAP (first_element) = DR_GROUP_GAP (orig);
+          DR_GROUP_NEXT_ELEMENT (first_element) = DR_GROUP_NEXT_ELEMENT (orig);
+          vinfo->grouped_stores[i] = first_element;
+        }
+      stmt_vec_info prev = first_element;
+      while (DR_GROUP_NEXT_ELEMENT (prev))
+        {
+          stmt_vec_info elt = DR_GROUP_NEXT_ELEMENT (prev);
+          if (STMT_VINFO_IN_PATTERN_P (elt))
+            {
+              stmt_vec_info orig = elt;
+              elt = STMT_VINFO_RELATED_STMT (elt);
+              DR_GROUP_NEXT_ELEMENT (prev) = elt;
+              DR_GROUP_GAP (elt) = DR_GROUP_GAP (orig);
+              DR_GROUP_NEXT_ELEMENT (elt) = DR_GROUP_NEXT_ELEMENT (orig);
+            }
+          DR_GROUP_FIRST_ELEMENT (elt) = first_element;
+          prev = elt;
+        }
+    }
+}
+
 /* Check if the region described by BB_VINFO can be vectorized, returning
    true if so.  When returning false, set FATAL to true if the same failure
    would prevent vectorization at other vector sizes, false if it is still
@@ -4093,6 +4135,9 @@ vect_slp_analyze_bb_1 (bb_vec_info bb_vinfo, int n_stmts, bool &fatal,
 
   vect_pattern_recog (bb_vinfo);
 
+  /* Update store groups from pattern processing.  */
+  vect_fixup_store_groups_with_patterns (bb_vinfo);
+
   /* Check the SLP opportunities in the basic block, analyze and build SLP
      trees.  */
   if (!vect_analyze_slp (bb_vinfo, n_stmts))
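
For orientation only, below the patch: a minimal, self-contained sketch of
the chain rewrite that vect_fixup_store_groups_with_patterns performs on
vinfo->grouped_stores.  Everything here is an invented stand-in for the GCC
internals (the struct, fields and names are not the real stmt_vec_info or
DR_GROUP_* accessors); it only models the linked-list surgery: each group
entry is swapped for its pattern variant while the first-element, next, size
and gap metadata is carried over from the original stmt.

/* Toy model (not GCC code) of the store-group fixup in the patch.  */
#include <stdio.h>
#include <stddef.h>

struct stmt_info
{
  const char *name;            /* for printing only */
  struct stmt_info *pattern;   /* pattern variant, or NULL */
  struct stmt_info *first;     /* models DR_GROUP_FIRST_ELEMENT */
  struct stmt_info *next;      /* models DR_GROUP_NEXT_ELEMENT */
  unsigned size;               /* models DR_GROUP_SIZE (first element only) */
  unsigned gap;                /* models DR_GROUP_GAP */
};

/* Replace group entries by their pattern variants, copying the chain
   metadata from the original entry; returns the (possibly new) head.  */
static struct stmt_info *
fixup_group (struct stmt_info *first)
{
  if (first->pattern)
    {
      struct stmt_info *orig = first;
      first = first->pattern;
      first->first = first;
      first->size = orig->size;
      first->gap = orig->gap;
      first->next = orig->next;
    }
  struct stmt_info *prev = first;
  while (prev->next)
    {
      struct stmt_info *elt = prev->next;
      if (elt->pattern)
        {
          struct stmt_info *orig = elt;
          elt = elt->pattern;
          prev->next = elt;
          elt->gap = orig->gap;
          elt->next = orig->next;
        }
      elt->first = first;
      prev = elt;
    }
  return first;
}

int
main (void)
{
  /* A two-element store group where the second store was pattern
     recognized (s1 -> p1), as happens for the boolean stores in the
     new testcase.  */
  struct stmt_info p1 = { "p1 (pattern)", NULL, NULL, NULL, 0, 0 };
  struct stmt_info s1 = { "s1", &p1, NULL, NULL, 0, 0 };
  struct stmt_info s0 = { "s0", NULL, NULL, &s1, 2, 0 };
  s0.first = &s0;
  s1.first = &s0;

  struct stmt_info *first = fixup_group (&s0);
  for (struct stmt_info *s = first; s; s = s->next)
    printf ("%s\n", s->name);   /* prints: s0, then p1 (pattern) */
  return 0;
}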