From 0162d00d12be24ee3f02ce876adafeaa91c6f7f9 Mon Sep 17 00:00:00 2001
From: Andrew MacLeod <amacleod@redhat.com>
Date: Wed, 28 Oct 2020 16:41:15 -0400
Subject: [PATCH] Call infer_nonnull_range() directly when checking for
 non-null.

Simply call infer_nonnull_range directly and avoid unnecessary checks of
the statement being modified.

gcc/
	PR tree-optimization/97609
	* gimple-range-cache.cc (non_null_ref::process_name): Call
	infer_nonnull_range directly instead of infer_value_range.

gcc/testsuite/
	* g++.dg/pr97609.C: New.
---
 gcc/gimple-range-cache.cc      | 12 +++------
 gcc/testsuite/g++.dg/pr97609.C | 46 ++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 50 insertions(+), 8 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/pr97609.C

diff --git a/gcc/gimple-range-cache.cc b/gcc/gimple-range-cache.cc
index 13b9933cc01..bc9243c1279 100644
--- a/gcc/gimple-range-cache.cc
+++ b/gcc/gimple-range-cache.cc
@@ -91,19 +91,15 @@ non_null_ref::process_name (tree name)
     {
       gimple *s = USE_STMT (use_p);
       unsigned index = gimple_bb (s)->index;
-      tree value;
-      enum tree_code comp_code;
 
       // If bit is already set for this block, dont bother looking again.
       if (bitmap_bit_p (b, index))
	continue;
 
-      // If we can infer a != 0 range, then set the bit for this BB
-      if (infer_value_range (s, name, &comp_code, &value))
-	{
-	  if (comp_code == NE_EXPR && integer_zerop (value))
-	    bitmap_set_bit (b, index);
-	}
+      // If we can infer a nonnull range, then set the bit for this BB
+      if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
+	  && infer_nonnull_range (s, name))
+	bitmap_set_bit (b, index);
     }
 
   m_nn[v] = b;
diff --git a/gcc/testsuite/g++.dg/pr97609.C b/gcc/testsuite/g++.dg/pr97609.C
new file mode 100644
index 00000000000..8e582c9ad49
--- /dev/null
+++ b/gcc/testsuite/g++.dg/pr97609.C
@@ -0,0 +1,46 @@
+// PR tree-optimization/97609
+// { dg-do compile { target c++11 } }
+// { dg-options "-O2 -fno-tree-fre -fnon-call-exceptions" }
+
+struct _Fwd_list_node_base {
+  _Fwd_list_node_base *_M_next;
+  void _M_transfer_after() { _Fwd_list_node_base *__keep = _M_next = __keep; }
+};
+struct _Fwd_list_const_iterator {
+  _Fwd_list_const_iterator(_Fwd_list_node_base *__n) : _M_node(__n) {}
+  _Fwd_list_const_iterator(int);
+  _Fwd_list_node_base *_M_node;
+};
+template <typename _Tp, typename _Alloc> struct forward_list {
+  _Fwd_list_node_base _M_head;
+  template <typename _InputIterator>
+  forward_list(_InputIterator, _InputIterator);
+  forward_list(int);
+  _Fwd_list_const_iterator cbefore_begin() { return &_M_head; }
+  void splice_after(_Fwd_list_const_iterator) noexcept;
+  void splice_after(_Fwd_list_const_iterator __pos, forward_list &) {
+    splice_after(__pos, 0);
+  }
+  using __remove_return_type = void;
+  __remove_return_type unique() { unique(0); }
+  template <typename _BinPred> __remove_return_type unique(_BinPred);
+};
+template <typename _Tp, typename _Alloc>
+void forward_list<_Tp, _Alloc>::splice_after(_Fwd_list_const_iterator __pos)
+    noexcept {
+  __pos._M_node->_M_transfer_after();
+}
+template <typename _Tp, typename _Alloc>
+template <typename _BinPred>
+auto forward_list<_Tp, _Alloc>::unique(_BinPred) -> __remove_return_type {
+  forward_list __to_destroy(0);
+  splice_after(__to_destroy.cbefore_begin());
+}
+
+void
+foo ()
+{
+  forward_list<int, int> c1 (0, 0);
+  c1.unique ();
+}
+
-- 
2.30.2
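
For readers who want the effect of the gimple-range-cache.cc hunk in
isolation, the following standalone C++ sketch models the per-block caching
that non_null_ref::process_name performs after this patch.  Everything in it
is a hypothetical stand-in: `stmt', `ssa_name', `stmt_implies_nonnull' and
the std::set play the roles of GCC's gimple statements, SSA names,
infer_nonnull_range () and the sbitmap stored in m_nn[]; only the shape of
the loop mirrors the change.

#include <set>
#include <vector>

struct stmt
{
  int bb_index;		// Index of the basic block containing the statement.
  bool derefs_operand;	// Stand-in: does executing it imply the name != 0?
};

struct ssa_name
{
  bool abnormal_phi;		   // Stand-in for SSA_NAME_OCCURS_IN_ABNORMAL_PHI.
  std::vector<const stmt *> uses;  // Stand-in for the immediate-use list.
};

// Stand-in for infer_nonnull_range (s, name): true if executing S
// guarantees NAME is non-null afterwards.
static bool
stmt_implies_nonnull (const stmt *s, const ssa_name *)
{
  return s->derefs_operand;
}

// Build the set of basic-block indices in which some use of NAME lets us
// treat it as non-null, the analogue of the bitmap cached per SSA name.
static std::set<int>
process_name (const ssa_name *name)
{
  std::set<int> nonnull_blocks;
  for (const stmt *s : name->uses)
    {
      // If the block is already marked, don't bother looking again.
      if (nonnull_blocks.count (s->bb_index))
	continue;
      // As in the patch: one direct query, guarded against names that occur
      // in abnormal PHIs, instead of inferring a symbolic range and then
      // testing it for "!= 0".
      if (!name->abnormal_phi && stmt_implies_nonnull (s, name))
	nonnull_blocks.insert (s->bb_index);
    }
  return nonnull_blocks;
}

int
main ()
{
  stmt s1 = { 2, true }, s2 = { 3, false };
  ssa_name n = { false, { &s1, &s2 } };
  // Block 2 should be marked non-null, block 3 should not.
  return process_name (&n).count (2) == 1 ? 0 : 1;
}

The point of the one-query form is the same as in the patch: the caller only
needs a yes/no "is this name non-null in this block" answer, so going through
a generic range query and then testing the result against zero adds nothing.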