From 1e99c6e0448f32e9490946f3821fe0c45b69b3f5 Mon Sep 17 00:00:00 2001
From: Jakub Jelinek <jakub@redhat.com>
Date: Thu, 31 Oct 2013 14:58:29 +0100
Subject: [PATCH] tree-vrp.c (maybe_set_nonzero_bits): New function.

	* tree-vrp.c (maybe_set_nonzero_bits): New function.
	(remove_range_assertions): Call it.

From-SVN: r204258
---
 gcc/ChangeLog  |  3 +++
 gcc/tree-vrp.c | 61 ++++++++++++++++++++++++++++++++++++++++++++++++--
 2 files changed, 62 insertions(+), 2 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 551b3031ea9..99a50346897 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,8 @@
 2013-10-31  Jakub Jelinek  <jakub@redhat.com>
 
+	* tree-vrp.c (maybe_set_nonzero_bits): New function.
+	(remove_range_assertions): Call it.
+
 	* tree.c (tree_ctz): New function.
 	* tree.h (tree_ctz): New prototype.
 	* tree-ssanames.h (get_range_info, get_nonzero_bits): Change
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index a28e9369660..15cfcb3f591 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -6486,6 +6486,60 @@ all_imm_uses_in_stmt_or_feed_cond (tree var, gimple stmt, basic_block cond_bb)
   return true;
 }
 
+/* Handle
+   _4 = x_3 & 31;
+   if (_4 != 0)
+     goto <bb 6>;
+   else
+     goto <bb 7>;
+   <bb 6>:
+   __builtin_unreachable ();
+   <bb 7>:
+   x_5 = ASSERT_EXPR <x_3, ...>;
+   If x_3 has no other immediate uses (checked by caller),
+   var is the x_3 var from ASSERT_EXPR, we can clear low 5 bits
+   from the non-zero bitmask.  */
+
+static void
+maybe_set_nonzero_bits (basic_block bb, tree var)
+{
+  edge e = single_pred_edge (bb);
+  basic_block cond_bb = e->src;
+  gimple stmt = last_stmt (cond_bb);
+  tree cst;
+
+  if (stmt == NULL
+      || gimple_code (stmt) != GIMPLE_COND
+      || gimple_cond_code (stmt) != ((e->flags & EDGE_TRUE_VALUE)
+				     ? EQ_EXPR : NE_EXPR)
+      || TREE_CODE (gimple_cond_lhs (stmt)) != SSA_NAME
+      || !integer_zerop (gimple_cond_rhs (stmt)))
+    return;
+
+  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
+  if (!is_gimple_assign (stmt)
+      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
+      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
+    return;
+  if (gimple_assign_rhs1 (stmt) != var)
+    {
+      gimple stmt2;
+
+      if (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME)
+	return;
+      stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+      if (!gimple_assign_cast_p (stmt2)
+	  || gimple_assign_rhs1 (stmt2) != var
+	  || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt2))
+	  || (TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt)))
+	      != TYPE_PRECISION (TREE_TYPE (var))))
+	return;
+    }
+  cst = gimple_assign_rhs2 (stmt);
+  set_nonzero_bits (var, (get_nonzero_bits (var)
+			  & ~tree_to_double_int (cst)));
+}
+
 /* Convert range assertion expressions into the implied copies and
    copy propagate away the copies.  Doing the trivial copy propagation
    here avoids the need to run the full copy propagation pass after
@@ -6566,8 +6620,11 @@ remove_range_assertions (void)
 	      if (is_unreachable
 		  && all_imm_uses_in_stmt_or_feed_cond (var, stmt,
 							single_pred (bb)))
-		set_range_info (var, SSA_NAME_RANGE_INFO (lhs)->min,
-				SSA_NAME_RANGE_INFO (lhs)->max);
+		{
+		  set_range_info (var, SSA_NAME_RANGE_INFO (lhs)->min,
+				  SSA_NAME_RANGE_INFO (lhs)->max);
+		  maybe_set_nonzero_bits (bb, var);
+		}
 	    }
 
 	  /* Propagate the RHS into every use of the LHS.  */
-- 
2.30.2
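
Illustrative note (not part of the patch): the GIMPLE quoted in the comment above
maybe_set_nonzero_bits is the kind of IL a source-level alignment assumption
typically lowers to.  A minimal C sketch of such a pattern follows; the function
names assume_aligned_32 and use_aligned are invented for the example, and whether
later passes actually exploit the recorded nonzero bits (for instance to fold a
redundant mask) depends on the rest of the optimization pipeline.

    #include <stdint.h>

    /* Sketch only: an "x & 31" test feeding __builtin_unreachable (), the
       shape maybe_set_nonzero_bits looks for once VRP has inserted
       ASSERT_EXPRs and the unreachable branch.  */
    static inline uintptr_t
    assume_aligned_32 (uintptr_t x)
    {
      if (x & 31)
	__builtin_unreachable ();	/* Never executed for 32-byte aligned x.  */
      return x;				/* Low 5 bits of x now recorded as zero.  */
    }

    /* With the nonzero-bits information propagated, a later mask of the
       same low bits becomes a candidate for folding to 0.  */
    uintptr_t
    use_aligned (uintptr_t p)
    {
      return assume_aligned_32 (p) & 31;
    }

Note that the cleared bits are attached to the original SSA name (var, x_3 in the
comment) rather than to the ASSERT_EXPR copy, and only after the caller has checked
that every immediate use of var feeds the assertion or the condition, so the
information stays valid once the ASSERT_EXPRs are removed; the helper also looks
through a same-precision conversion, so the mask may be applied to a cast of var.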