return NULL_TREE;
}
+/* Check whether STMT_INFO is only consumed by SLP stmts and mark it
+ pure_slp if so, otherwise push it to WORKLIST.  */
+
+static void
+maybe_push_to_hybrid_worklist (vec_info *vinfo,
+ vec<stmt_vec_info> &worklist,
+ stmt_vec_info stmt_info)
+{
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Processing hybrid candidate : %G", stmt_info->stmt);
+ stmt_vec_info orig_info = vect_orig_stmt (stmt_info);
+ imm_use_iterator iter2;
+ ssa_op_iter iter1;
+ use_operand_p use_p;
+ def_operand_p def_p;
+ bool any_def = false;
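+ /* Walk all IL uses of each def of the original scalar stmt to see
+ whether any of them forces this stmt to remain loop_vect.  */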
+ FOR_EACH_PHI_OR_STMT_DEF (def_p, orig_info->stmt, iter1, SSA_OP_DEF)
+ {
+ any_def = true;
+ FOR_EACH_IMM_USE_FAST (use_p, iter2, DEF_FROM_PTR (def_p))
+ {
+ stmt_vec_info use_info = vinfo->lookup_stmt (USE_STMT (use_p));
+ /* An out-of-loop use means this is a loop_vect sink.  */
+ if (!use_info)
+ {
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Found loop_vect sink: %G", stmt_info->stmt);
+ worklist.safe_push (stmt_info);
+ return;
+ }
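+ /* A use in a stmt that is not SLPed means this stmt has to remain
+ loop_vect as well.  */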
+ else if (!STMT_SLP_TYPE (vect_stmt_to_vectorize (use_info)))
+ {
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Found loop_vect use: %G", use_info->stmt);
+ worklist.safe_push (stmt_info);
+ return;
+ }
+ }
+ }
+ /* No def means this is a loop_vect sink.  */
+ if (!any_def)
+ {
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Found loop_vect sink: %G", stmt_info->stmt);
+ worklist.safe_push (stmt_info);
+ return;
+ }
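+ /* All uses are in SLPed stmts, so this stmt is only consumed by SLP
+ and can be marked pure_slp.  */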
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Marked SLP consumed stmt pure: %G", stmt_info->stmt);
+ STMT_SLP_TYPE (stmt_info) = pure_slp;
+}
+
/* Find stmts that must be both vectorized and SLPed. */
void
/* All stmts participating in SLP are marked pure_slp, all other
stmts are loop_vect.
- First collect all loop_vect stmts into a worklist. */
+ First collect all loop_vect stmts into a worklist.
+ Because of SLP patterns, not all original scalar stmts appear in
+ SLP_TREE_SCALAR_STMTS and thus not all of them are marked pure_slp.
+ Rectify this here by doing a backward walk over the IL, treating a stmt
+ as loop_vect only when it is used by a loop_vect stmt and otherwise
+ marking it pure_slp. */
auto_vec<stmt_vec_info> worklist;
- for (unsigned i = 0; i < LOOP_VINFO_LOOP (loop_vinfo)->num_nodes; ++i)
+ for (int i = LOOP_VINFO_LOOP (loop_vinfo)->num_nodes - 1; i >= 0; --i)
{
basic_block bb = LOOP_VINFO_BBS (loop_vinfo)[i];
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gphi *phi = gsi.phi ();
stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (phi);
if (!STMT_SLP_TYPE (stmt_info) && STMT_VINFO_RELEVANT (stmt_info))
- worklist.safe_push (stmt_info);
+ maybe_push_to_hybrid_worklist (loop_vinfo,
+ worklist, stmt_info);
}
- for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
- gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
+ gsi_prev (&gsi))
{
gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
= loop_vinfo->lookup_stmt (gsi_stmt (gsi2));
if (!STMT_SLP_TYPE (patt_info)
&& STMT_VINFO_RELEVANT (patt_info))
- worklist.safe_push (patt_info);
+ maybe_push_to_hybrid_worklist (loop_vinfo,
+ worklist, patt_info);
}
stmt_info = STMT_VINFO_RELATED_STMT (stmt_info);
}
if (!STMT_SLP_TYPE (stmt_info) && STMT_VINFO_RELEVANT (stmt_info))
- worklist.safe_push (stmt_info);
+ maybe_push_to_hybrid_worklist (loop_vinfo,
+ worklist, stmt_info);
}
}