--- /dev/null
+/* PR tree-optimization/96979 */
+/* { dg-do compile } */
+/* { dg-options "-std=c++17 -O2" } */
+
+using u64 = unsigned long long;
+
+constexpr inline u64
+foo (const char *str) noexcept
+{
+ u64 value = 0xcbf29ce484222325ULL;
+ for (u64 i = 0; str[i]; i++)
+ value = (value ^ u64(str[i])) * 0x100000001b3ULL;
+ return value;
+}
+
+struct V
+{
+ enum W
+ {
+#define A(n) n,
+#define B(n) A(n##0) A(n##1) A(n##2) A(n##3) A(n##4) A(n##5) A(n##6) A(n##7) A(n##8) A(n##9)
+#define C(n) B(n##0) B(n##1) B(n##2) B(n##3) B(n##4) B(n##5) B(n##6) B(n##7) B(n##8) B(n##9)
+#define D(n) C(n##0) C(n##1) C(n##2) C(n##3) C(n##4) C(n##5) C(n##6) C(n##7) C(n##8) C(n##9)
+#define E D(foo1) D(foo2) D(foo3)
+ E
+ last
+ };
+
+ constexpr static W
+ bar (const u64 h) noexcept
+ {
+ switch (h)
+ {
+#undef A
+#define F(n) #n
+#define A(n) case foo (F(n)): return n;
+ E
+ }
+ return last;
+ }
+};
+
+int
+baz (const char *s)
+{
+ const u64 h = foo (s);
+ return V::bar (h);
+}
if (range == 0)
return false;
+ if (range > HOST_WIDE_INT_M1U / 100)
+ return false;
+
+ unsigned HOST_WIDE_INT lhs = 100 * range;
+ if (lhs < range)
+ return false;
+
+ /* First make a quick guess, as each cluster
+ can add at most 2 to the comparison_count. */
+ if (lhs > 2 * max_ratio * (end - start + 1))
+ return false;
+
unsigned HOST_WIDE_INT comparison_count = 0;
for (unsigned i = start; i <= end; i++)
{
comparison_count += sc->m_range_p ? 2 : 1;
}
- unsigned HOST_WIDE_INT lhs = 100 * range;
- if (lhs < range)
- return false;
-
return lhs <= max_ratio * comparison_count;
}
{
/* Check overflow. */
if (range == 0)
- return 0;
+ return false;
if (range >= GET_MODE_BITSIZE (word_mode))
return false;
- return uniq <= 3;
+ return uniq <= m_max_case_bit_tests;
}
/* Return true when cluster starting at START and ending at END (inclusive)
bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
unsigned start, unsigned end)
{
+ auto_vec<int, m_max_case_bit_tests> dest_bbs;
/* For algorithm correctness, bit test for a single case must return
true. We bail out in is_beneficial if it's called just for
a single case. */
unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
clusters[end]->get_high ());
- auto_bitmap dest_bbs;
+
+ /* Make a guess first. */
+ if (!can_be_handled (range, m_max_case_bit_tests))
+ return false;
for (unsigned i = start; i <= end; i++)
{
simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
- bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
+ /* m_max_case_bit_tests is a very small integer, so this linear
+ search is effectively constant time. */
+ if (!dest_bbs.contains (sc->m_case_bb->index))
+ {
+ if (dest_bbs.length () >= m_max_case_bit_tests)
+ return false;
+ dest_bbs.quick_push (sc->m_case_bb->index);
+ }
}
- return can_be_handled (range, bitmap_count_bits (dest_bbs));
+ return true;
}
/* Return true when COUNT of cases of UNIQ labels is beneficial for bit test
then return 0. */
static unsigned HOST_WIDE_INT get_range (tree low, tree high)
{
- tree r = fold_build2 (MINUS_EXPR, TREE_TYPE (low), high, low);
- if (!tree_fits_uhwi_p (r))
+ wide_int w = wi::to_wide (high) - wi::to_wide (low);
+ if (wi::neg_p (w, TYPE_SIGN (TREE_TYPE (low))) || !wi::fits_uhwi_p (w))
return 0;
-
- return tree_to_uhwi (r) + 1;
+ return w.to_uhwi () + 1;
}
/* Case label. */