fixup BIT_FIELD_REF detection in SLP discovery
author: Richard Biener <rguenther@suse.de>
Thu, 9 Jul 2020 14:06:04 +0000 (16:06 +0200)
committer: Richard Biener <rguenther@suse.de>
Thu, 9 Jul 2020 17:54:34 +0000 (19:54 +0200)
This fixes a thinko where we end up combining a BIT_FIELD_REF
and a memory access, fixed by checking all stmts are a load or
none.

2020-07-09  Richard Biener  <rguenther@suse.de>

PR tree-optimization/96133
* tree-vect-slp.c (vect_build_slp_tree_1): Compare load_p
status between stmts.

gcc/tree-vect-slp.c

index 35ae698459349f6125fda75e280d887fec91f055..b3645b0a82079c5139dbed6b805beff15d8d388e 100644 (file)
@@ -760,7 +760,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
   machine_mode optab_op2_mode;
   machine_mode vec_mode;
   stmt_vec_info first_load = NULL, prev_first_load = NULL;
-  bool load_p = false;
+  bool first_stmt_load_p = false, load_p = false;
 
   /* For every stmt in NODE find its def stmt/s.  */
   stmt_vec_info stmt_info;
@@ -850,6 +850,7 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
        {
          *node_vectype = vectype;
          first_stmt_code = rhs_code;
+         first_stmt_load_p = load_p;
 
          /* Shift arguments should be equal in all the packed stmts for a
             vector shift with scalar shift operand.  */
@@ -931,24 +932,25 @@ vect_build_slp_tree_1 (vec_info *vinfo, unsigned char *swap,
          if (first_stmt_code != rhs_code
              && alt_stmt_code == ERROR_MARK)
            alt_stmt_code = rhs_code;
-         if (first_stmt_code != rhs_code
-             && (first_stmt_code != IMAGPART_EXPR
-                 || rhs_code != REALPART_EXPR)
-             && (first_stmt_code != REALPART_EXPR
-                 || rhs_code != IMAGPART_EXPR)
-             /* Handle mismatches in plus/minus by computing both
-                and merging the results.  */
-             && !((first_stmt_code == PLUS_EXPR
-                   || first_stmt_code == MINUS_EXPR)
-                  && (alt_stmt_code == PLUS_EXPR
-                      || alt_stmt_code == MINUS_EXPR)
-                  && rhs_code == alt_stmt_code)
-             && !(STMT_VINFO_GROUPED_ACCESS (stmt_info)
-                   && (first_stmt_code == ARRAY_REF
-                       || first_stmt_code == BIT_FIELD_REF
-                       || first_stmt_code == INDIRECT_REF
-                       || first_stmt_code == COMPONENT_REF
-                       || first_stmt_code == MEM_REF)))
+         if ((first_stmt_code != rhs_code
+              && (first_stmt_code != IMAGPART_EXPR
+                  || rhs_code != REALPART_EXPR)
+              && (first_stmt_code != REALPART_EXPR
+                  || rhs_code != IMAGPART_EXPR)
+              /* Handle mismatches in plus/minus by computing both
+                 and merging the results.  */
+              && !((first_stmt_code == PLUS_EXPR
+                    || first_stmt_code == MINUS_EXPR)
+                   && (alt_stmt_code == PLUS_EXPR
+                       || alt_stmt_code == MINUS_EXPR)
+                   && rhs_code == alt_stmt_code)
+              && !(STMT_VINFO_GROUPED_ACCESS (stmt_info)
+                   && (first_stmt_code == ARRAY_REF
+                       || first_stmt_code == BIT_FIELD_REF
+                       || first_stmt_code == INDIRECT_REF
+                       || first_stmt_code == COMPONENT_REF
+                       || first_stmt_code == MEM_REF)))
+             || first_stmt_load_p != load_p)
            {
              if (dump_enabled_p ())
                {