This is a key refactoring towards proof support for strings.
This ensures that InferInfo is maintained until just before facts, lemmas, and conflicts are processed. This allows us both to:
(1) track more accurate stats and debug information,
(2) have enough information to ensure that proofs can be constructed at the moment facts, lemmas, and conflicts are processed.
Changes in this PR:
PendingInfer and InferInfo were merged. CoreInferInfo now plays the role of the previous InferInfo, extended with core-solver-specific information, and is used only by CoreSolver.
sendInference(const InferInfo& info, ...) is now the central method for sending inferences; it is called by the other variants, not the other way around (a minimal sketch of this dispatch follows this description).
sendLemma is inlined into sendInference(...) for clarity.
doPendingLemmas and doPendingFacts are extended to process the side effects of the inference infos.
There is a further issue with pending inferences: the side effects of a CoreInferInfo are processed eagerly, before we know whether the lemma is actually sent. I believe this issue does not occur in practice based on our strategy. Further refactoring could tighten this, e.g. a virtual void InferInfo::processSideEffect; I will do this in another PR.
Further refactoring can address whether asLemma should be a field of InferInfo (it probably should), and whether we should explicitly check for AND in conclusions instead of making it the responsibility of the user of this class.
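
For reference, the sketch below illustrates the intended dispatch: every sendInference variant constructs an InferInfo and funnels it through the single InferInfo-based method, which sends a conflict immediately or queues the inference as a pending lemma or pending fact. This is a minimal standalone model, not the actual CVC4 classes; the types are simplified stand-ins (strings in place of Nodes, a string id in place of the Inference enum) and the bodies are illustrative only.

```cpp
#include <iostream>
#include <string>
#include <vector>

// Simplified stand-in for theory::strings::InferInfo.
struct InferInfo
{
  std::string d_id;                 // inference identifier
  std::string d_conc;               // conclusion ("true", "false", or a literal)
  std::vector<std::string> d_ant;   // antecedents explainable by strings
  std::vector<std::string> d_antn;  // antecedents not explainable by strings
  bool isTrivial() const { return d_conc == "true"; }
  bool isConflict() const { return d_conc == "false" && d_antn.empty(); }
  bool isFact() const
  {
    // a single non-trivial literal with no external antecedents can be
    // asserted to the equality engine as a fact
    return d_conc != "true" && d_conc != "false" && d_antn.empty();
  }
};

// Simplified stand-in for the InferenceManager dispatch described above.
struct InferenceManagerSketch
{
  std::vector<InferInfo> d_pending;     // facts, flushed by doPendingFacts
  std::vector<InferInfo> d_pendingLem;  // lemmas, flushed by doPendingLemmas

  void sendInference(const InferInfo& ii, bool asLemma = false)
  {
    if (ii.isTrivial())
    {
      return;  // callers are expected to filter trivial conclusions
    }
    if (asLemma || !ii.isFact())
    {
      if (ii.isConflict())
      {
        // conflicts are sent on the output channel immediately
        std::cout << "conflict by " << ii.d_id << std::endl;
        return;
      }
      // lemmas are queued; their side effects are processed when they are
      // flushed in doPendingLemmas
      d_pendingLem.push_back(ii);
      return;
    }
    // facts are queued and later asserted to the equality engine
    d_pending.push_back(ii);
  }
};

int main()
{
  InferenceManagerSketch im;
  // a split is always sent as a lemma
  im.sendInference({"LEN_SPLIT", "(or (= x y) (not (= x y)))", {}, {}}, true);
  // a single literal with an explainable antecedent becomes a pending fact
  im.sendInference({"SSPLIT_CST", "(= x \"a\")", {"(not (= x \"\"))"}, {}});
  std::cout << im.d_pendingLem.size() << " pending lemma(s), "
            << im.d_pending.size() << " pending fact(s)" << std::endl;
  return 0;
}
```

In the actual PR, the queued entries additionally carry new skolems and phase requirements, whose registration is deferred until the lemma is flushed in doPendingLemmas, as described above.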
namespace theory {
namespace strings {
+CoreInferInfo::CoreInferInfo() : d_index(0), d_rev(false) {}
+
CoreSolver::CoreSolver(context::Context* c,
context::UserContext* u,
SolverState& s,
InferenceManager& im,
TermRegistry& tr,
BaseSolver& bs)
- : d_state(s), d_im(im), d_termReg(tr), d_bsolver(bs), d_nf_pairs(c)
+ : d_state(s), d_im(im), d_termReg(tr), d_bsolver(bs), d_nfPairs(c)
{
d_zero = NodeManager::currentNM()->mkConst( Rational( 0 ) );
d_one = NodeManager::currentNM()->mkConst( Rational( 1 ) );
TypeNode stype)
{
//the possible inferences
- std::vector< InferInfo > pinfer;
+ std::vector<CoreInferInfo> pinfer;
// loop over all pairs
for(unsigned i=0; i<normal_forms.size()-1; i++) {
//unify each normalform[j] with normal_forms[i]
<< ") : " << std::endl;
Inference min_id = Inference::NONE;
unsigned max_index = 0;
- for (unsigned i = 0, size = pinfer.size(); i < size; i++)
+ for (unsigned i = 0, psize = pinfer.size(); i < psize; i++)
{
- Trace("strings-solve") << "#" << i << ": From " << pinfer[i].d_i << " / "
- << pinfer[i].d_j << " (rev=" << pinfer[i].d_rev
- << ") : ";
- Trace("strings-solve") << pinfer[i].d_conc << " by " << pinfer[i].d_id
- << std::endl;
- if (!set_use_index || pinfer[i].d_id < min_id
- || (pinfer[i].d_id == min_id && pinfer[i].d_index > max_index))
+ CoreInferInfo& ipii = pinfer[i];
+ InferInfo& ii = ipii.d_infer;
+ Trace("strings-solve") << "#" << i << ": From " << ipii.d_i << " / "
+ << ipii.d_j << " (rev=" << ipii.d_rev << ") : ";
+ Trace("strings-solve") << ii.d_conc << " by " << ii.d_id << std::endl;
+ if (!set_use_index || ii.d_id < min_id
+ || (ii.d_id == min_id && ipii.d_index > max_index))
{
- min_id = pinfer[i].d_id;
- max_index = pinfer[i].d_index;
+ min_id = ii.d_id;
+ max_index = ipii.d_index;
use_index = i;
set_use_index = true;
}
}
Trace("strings-solve") << "...choose #" << use_index << std::endl;
- doInferInfo(pinfer[use_index]);
+ if (!processInferInfo(pinfer[use_index]))
+ {
+ Unhandled() << "Failed to process infer info " << pinfer[use_index].d_infer
+ << std::endl;
+ }
}
void CoreSolver::processSimpleNEq(NormalForm& nfi,
unsigned& index,
bool isRev,
unsigned rproc,
- std::vector<InferInfo>& pinfer,
+ std::vector<CoreInferInfo>& pinfer,
TypeNode stype)
{
NodeManager* nm = NodeManager::currentNM();
}
// The candidate inference "info"
- InferInfo info;
+ CoreInferInfo info;
+ InferInfo& iinfo = info.d_infer;
info.d_index = index;
// for debugging
info.d_i = nfi.d_base;
<< std::endl;
Node lenEq = nm->mkNode(EQUAL, xLenTerm, yLenTerm);
lenEq = Rewriter::rewrite(lenEq);
- info.d_conc = nm->mkNode(OR, lenEq, lenEq.negate());
- info.d_pending_phase[lenEq] = true;
- info.d_id = Inference::LEN_SPLIT;
+ iinfo.d_conc = nm->mkNode(OR, lenEq, lenEq.negate());
+ iinfo.d_id = Inference::LEN_SPLIT;
+ info.d_pendingPhase[lenEq] = true;
pinfer.push_back(info);
break;
}
// We are dealing with a looping word equation.
if (!isRev)
{ // FIXME
- NormalForm::getExplanationForPrefixEq(nfi, nfj, -1, -1, info.d_ant);
+ NormalForm::getExplanationForPrefixEq(nfi, nfj, -1, -1, iinfo.d_ant);
ProcessLoopResult plr =
processLoop(lhsLoopIdx != -1 ? nfi : nfj,
lhsLoopIdx != -1 ? nfj : nfi,
// infer the purification equality, and the (dis)equality
// with the empty string in the direction that the rewriter
// inferred
- info.d_conc = nm->mkNode(
+ iinfo.d_conc = nm->mkNode(
AND, p.eqNode(nc), !eq.getConst<bool>() ? pEq.negate() : pEq);
- info.d_id = Inference::INFER_EMP;
+ iinfo.d_id = Inference::INFER_EMP;
}
else
{
- info.d_conc = nm->mkNode(OR, eq, eq.negate());
- info.d_id = Inference::LEN_SPLIT_EMP;
+ iinfo.d_conc = nm->mkNode(OR, eq, eq.negate());
+ iinfo.d_id = Inference::LEN_SPLIT_EMP;
}
pinfer.push_back(info);
break;
// At this point, we know that `nc` is non-empty, so we add that to our
// explanation.
- info.d_ant.push_back(expNonEmpty);
+ iinfo.d_ant.push_back(expNonEmpty);
size_t ncIndex = index + 1;
Node nextConstStr = nfnc.collectConstantStringAt(ncIndex);
if (p > 1)
{
NormalForm::getExplanationForPrefixEq(
- nfc, nfnc, cIndex, ncIndex, info.d_ant);
+ nfc, nfnc, cIndex, ncIndex, iinfo.d_ant);
Node prea = p == straLen ? stra
: (isRev ? Word::suffix(stra, p)
: Word::prefix(stra, p));
Trace("strings-csp")
<< "Const Split: " << prea << " is removed from " << stra
<< " due to " << strb << ", p=" << p << std::endl;
- info.d_conc = nc.eqNode(isRev ? utils::mkNConcat(sk, prea)
- : utils::mkNConcat(prea, sk));
- info.d_new_skolem[LENGTH_SPLIT].push_back(sk);
- info.d_id = Inference::SSPLIT_CST_PROP;
+ iinfo.d_conc = nc.eqNode(isRev ? utils::mkNConcat(sk, prea)
+ : utils::mkNConcat(prea, sk));
+ iinfo.d_new_skolem[LENGTH_SPLIT].push_back(sk);
+ iinfo.d_id = Inference::SSPLIT_CST_PROP;
pinfer.push_back(info);
break;
}
Trace("strings-csp") << "Const Split: " << firstChar
<< " is removed from " << stra << " (serial) "
<< std::endl;
- NormalForm::getExplanationForPrefixEq(nfi, nfj, index, index, info.d_ant);
- info.d_conc = nc.eqNode(isRev ? utils::mkNConcat(sk, firstChar)
- : utils::mkNConcat(firstChar, sk));
- info.d_new_skolem[LENGTH_SPLIT].push_back(sk);
- info.d_id = Inference::SSPLIT_CST;
+ NormalForm::getExplanationForPrefixEq(
+ nfi, nfj, index, index, iinfo.d_ant);
+ iinfo.d_conc = nc.eqNode(isRev ? utils::mkNConcat(sk, firstChar)
+ : utils::mkNConcat(firstChar, sk));
+ iinfo.d_new_skolem[LENGTH_SPLIT].push_back(sk);
+ iinfo.d_id = Inference::SSPLIT_CST;
pinfer.push_back(info);
break;
}
}
}
- NormalForm::getExplanationForPrefixEq(nfi, nfj, index, index, info.d_ant);
+ NormalForm::getExplanationForPrefixEq(nfi, nfj, index, index, iinfo.d_ant);
// Add premises for x != "" ^ y != ""
for (unsigned xory = 0; xory < 2; xory++)
{
Node tnz = d_state.explainNonEmpty(x);
if (!tnz.isNull())
{
- info.d_ant.push_back(tnz);
+ iinfo.d_ant.push_back(tnz);
}
else
{
tnz = x.eqNode(emp).negate();
- info.d_antn.push_back(tnz);
+ iinfo.d_antn.push_back(tnz);
}
}
SkolemCache* skc = d_termReg.getSkolemCache();
y,
isRev ? SkolemCache::SK_ID_V_SPT_REV : SkolemCache::SK_ID_V_SPT,
"v_spt");
- info.d_new_skolem[LENGTH_GEQ_ONE].push_back(sk);
+ iinfo.d_new_skolem[LENGTH_GEQ_ONE].push_back(sk);
Node eq1 =
x.eqNode(isRev ? utils::mkNConcat(sk, y) : utils::mkNConcat(y, sk));
Node eq2 =
if (lentTestSuccess != -1)
{
- info.d_antn.push_back(lentTestExp);
- info.d_conc = lentTestSuccess == 0 ? eq1 : eq2;
- info.d_id = Inference::SSPLIT_VAR_PROP;
+ iinfo.d_antn.push_back(lentTestExp);
+ iinfo.d_conc = lentTestSuccess == 0 ? eq1 : eq2;
+ iinfo.d_id = Inference::SSPLIT_VAR_PROP;
}
else
{
Node ldeq = nm->mkNode(EQUAL, xLenTerm, yLenTerm).negate();
- info.d_ant.push_back(ldeq);
- info.d_conc = nm->mkNode(OR, eq1, eq2);
- info.d_id = Inference::SSPLIT_VAR;
+ iinfo.d_ant.push_back(ldeq);
+ iinfo.d_conc = nm->mkNode(OR, eq1, eq2);
+ iinfo.d_id = Inference::SSPLIT_VAR;
}
pinfer.push_back(info);
break;
//xs(zy)=t(yz)xr
CoreSolver::ProcessLoopResult CoreSolver::processLoop(NormalForm& nfi,
- NormalForm& nfj,
- int loop_index,
- int index,
- InferInfo& info)
+ NormalForm& nfj,
+ int loop_index,
+ int index,
+ CoreInferInfo& info)
{
if (options::stringProcessLoopMode() == options::ProcessLoopMode::ABORT)
{
Node emp = Word::mkEmptyWord(stype);
+ InferInfo& iinfo = info.d_infer;
if (s_zy.isConst() && r.isConst() && r != emp)
{
int c;
{
Trace("strings-loop") << "Strings::Loop: tails are different."
<< std::endl;
- d_im.sendInference(info.d_ant, conc, Inference::FLOOP_CONFLICT, true);
+ d_im.sendInference(iinfo.d_ant, conc, Inference::FLOOP_CONFLICT, true);
return ProcessLoopResult::CONFLICT;
}
}
if (expNonEmpty.isNull())
{
// try to make t equal to empty to avoid loop
- info.d_conc = nm->mkNode(kind::OR, split_eq, split_eq.negate());
- info.d_id = Inference::LEN_SPLIT_EMP;
+ iinfo.d_conc = nm->mkNode(kind::OR, split_eq, split_eq.negate());
+ iinfo.d_id = Inference::LEN_SPLIT_EMP;
return ProcessLoopResult::INFERENCE;
}
else
{
- info.d_ant.push_back(expNonEmpty);
+ iinfo.d_ant.push_back(expNonEmpty);
}
}
else
}
}
- Node ant = d_im.mkExplain(info.d_ant);
- info.d_ant.clear();
- info.d_antn.push_back(ant);
+ Node ant = d_im.mkExplain(iinfo.d_ant);
+ iinfo.d_ant.clear();
+ iinfo.d_antn.push_back(ant);
Node str_in_re;
if (s_zy == t_yz && r == emp && s_zy.isConst()
} // normal case
// we will be done
- info.d_conc = conc;
- info.d_id = Inference::FLOOP;
- info.d_nf_pair[0] = nfi.d_base;
- info.d_nf_pair[1] = nfj.d_base;
+ iinfo.d_conc = conc;
+ iinfo.d_id = Inference::FLOOP;
+ info.d_nfPair[0] = nfi.d_base;
+ info.d_nfPair[1] = nfj.d_base;
return ProcessLoopResult::INFERENCE;
}
}
if( !isNormalFormPair( n1, n2 ) ){
int index = 0;
- NodeIntMap::const_iterator it = d_nf_pairs.find( n1 );
- if( it!=d_nf_pairs.end() ){
+ NodeIntMap::const_iterator it = d_nfPairs.find(n1);
+ if (it != d_nfPairs.end())
+ {
index = (*it).second;
}
- d_nf_pairs[n1] = index + 1;
+ d_nfPairs[n1] = index + 1;
if( index<(int)d_nf_pairs_data[n1].size() ){
d_nf_pairs_data[n1][index] = n2;
}else{
return isNormalFormPair(n2,n1);
}
//Trace("strings-debug") << "is normal form pair. " << n1 << " " << n2 << std::endl;
- NodeIntMap::const_iterator it = d_nf_pairs.find( n1 );
- if( it!=d_nf_pairs.end() ){
+ NodeIntMap::const_iterator it = d_nfPairs.find(n1);
+ if (it != d_nfPairs.end())
+ {
Assert(d_nf_pairs_data.find(n1) != d_nf_pairs_data.end());
for( int i=0; i<(*it).second; i++ ){
Assert(i < (int)d_nf_pairs_data[n1].size());
}
}
-void CoreSolver::doInferInfo(const InferInfo& ii)
+bool CoreSolver::processInferInfo(CoreInferInfo& cii)
{
- // send the inference
- if (!ii.d_nf_pair[0].isNull())
+ InferInfo& ii = cii.d_infer;
+ // rewrite the conclusion, ensure non-trivial
+ ii.d_conc = Rewriter::rewrite(ii.d_conc);
+
+ if (ii.isTrivial())
{
- Assert(!ii.d_nf_pair[1].isNull());
- addNormalFormPair(ii.d_nf_pair[0], ii.d_nf_pair[1]);
+ // conclusion rewrote to true
+ return false;
}
- // send the inference
- d_im.sendInference(ii);
- // Register the new skolems from this inference. We register them here
- // (lazily), since the code above has now decided to use the inference
- // at use_index that involves them.
- for (const std::pair<const LengthStatus, std::vector<Node> >& sks :
- ii.d_new_skolem)
+ // process the state change to this solver
+ if (!cii.d_nfPair[0].isNull())
{
- for (const Node& n : sks.second)
- {
- d_termReg.registerTermAtomic(n, sks.first);
- }
+ Assert(!cii.d_nfPair[1].isNull());
+ addNormalFormPair(cii.d_nfPair[0], cii.d_nfPair[1]);
}
-}
+ // send phase requirements
+ for (const std::pair<const Node, bool>& pp : cii.d_pendingPhase)
+ {
+ d_im.sendPhaseRequirement(pp.first, pp.second);
+ }
+
+ // send the inference, which is a lemma
+ d_im.sendInference(ii, true);
+ return true;
+}
}/* CVC4::theory::strings namespace */
}/* CVC4::theory namespace */
namespace theory {
namespace strings {
+/**
+ * This data structure encapsulates an inference for the core solver of the
+ * theory of strings. This includes the form of the inference to be processed
+ * by the inference manager, the side effects it generates for the core solver,
+ * and information used for heuristics and debugging.
+ */
+class CoreInferInfo
+{
+ public:
+ CoreInferInfo();
+ ~CoreInferInfo() {}
+ /** The infer info of this class */
+ InferInfo d_infer;
+ /**
+ * The pending phase requirements, see InferenceManager::sendPhaseRequirement.
+ */
+ std::map<Node, bool> d_pendingPhase;
+ /**
+ * The index in the normal forms that this inference addresses.
+ * For example, if the inference is inferring x = y from |x|=|y| and
+ * w ++ x ++ ... = w ++ y ++ ...
+ * then d_index is 1, since x and y are at index 1 in these concat terms.
+ */
+ unsigned d_index;
+ /**
+ * The normal form pair that is cached as a result of this inference.
+ */
+ Node d_nfPair[2];
+ /** for debugging
+ *
+ * The base pair of strings d_i/d_j that led to the inference, and whether
+ * (d_rev) we were processing the normal forms of these strings in reverse
+ * direction.
+ */
+ Node d_i;
+ Node d_j;
+ bool d_rev;
+};
+
/** The core solver for the theory of strings
*
* This implements techniques for handling (dis)equalities involving
private:
/**
* This processes the infer info ii as an inference. In more detail, it calls
- * the inference manager to process the inference, it introduces Skolems, and
- * updates the set of normal form pairs.
+ * the inference manager to process the inference, and updates the set of
+ * normal form pairs. Returns true if the conclusion of ii did not rewrite
+ * to true; if it did, this method does nothing and returns false.
*/
- void doInferInfo(const InferInfo& ii);
+ bool processInferInfo(CoreInferInfo& ii);
/** Add that (n1,n2) is a normal form pair in the current context. */
void addNormalFormPair(Node n1, Node n2);
/** Is (n1,n2) a normal form pair in the current context? */
*
* This method is called when two equal terms have normal forms nfi and nfj.
* It adds (typically at most one) possible inference to the vector pinfer.
- * This inference is in the form of an InferInfo object, which stores the
+ * This inference is in the form of a CoreInferInfo object, which stores the
* necessary information regarding how to process the inference.
*
* index: The index in the normal form vectors (nfi.d_nf and nfj.d_nf) that
unsigned& index,
bool isRev,
unsigned rproc,
- std::vector<InferInfo>& pinfer,
+ std::vector<CoreInferInfo>& pinfer,
TypeNode stype);
//--------------------------end for checkNormalFormsEq
NormalForm& nfj,
int loop_index,
int index,
- InferInfo& info);
+ CoreInferInfo& info);
//--------------------------end for checkNormalFormsEq with loops
//--------------------------for checkNormalFormsDeq
* independent map from nodes to lists of nodes to model this, given by
* the two data members below.
*/
- NodeIntMap d_nf_pairs;
+ NodeIntMap d_nfPairs;
std::map<Node, std::vector<Node> > d_nf_pairs_data;
/** list of non-congruent concat terms in each equivalence class */
std::map<Node, std::vector<Node> > d_eqc;
return out;
}
-InferInfo::InferInfo() : d_id(Inference::NONE), d_index(0), d_rev(false) {}
+InferInfo::InferInfo() : d_id(Inference::NONE) {}
+
+bool InferInfo::isTrivial() const
+{
+ Assert(!d_conc.isNull());
+ return d_conc.isConst() && d_conc.getConst<bool>();
+}
+
+bool InferInfo::isConflict() const
+{
+ Assert(!d_conc.isNull());
+ return d_conc.isConst() && !d_conc.getConst<bool>() && d_antn.empty();
+}
+
+bool InferInfo::isFact() const
+{
+ Assert(!d_conc.isNull());
+ TNode atom = d_conc.getKind() == kind::NOT ? d_conc[0] : d_conc;
+ return !atom.isConst() && atom.getKind() != kind::OR && d_antn.empty();
+}
+
+std::ostream& operator<<(std::ostream& out, const InferInfo& ii)
+{
+ out << "(infer " << ii.d_id << " " << ii.d_conc;
+ if (!ii.d_ant.empty())
+ {
+ out << " :ant (" << ii.d_ant << ")";
+ }
+ if (!ii.d_antn.empty())
+ {
+ out << " :antn (" << ii.d_antn << ")";
+ }
+ out << ")";
+ return out;
+}
} // namespace strings
} // namespace theory
};
/**
- * This data structure encapsulates an inference for strings. This includes
- * the form of the inference, as well as the side effects it generates.
+ * An inference. This is a class to track an unprocessed call to send a
+ * fact, lemma, or conflict that is waiting to be asserted to the
+ * equality engine or sent on the output channel.
*/
class InferInfo
{
public:
InferInfo();
- /**
- * The identifier for the inference, indicating the kind of reasoning used
- * for this conclusion.
- */
+ ~InferInfo() {}
+ /** The inference identifier */
Inference d_id;
- /** The conclusion of the inference */
+ /** The conclusion */
Node d_conc;
/**
* The antecedant(s) of the inference, interpreted conjunctively. These are
* can be assumed for them.
*/
std::map<LengthStatus, std::vector<Node> > d_new_skolem;
+ /** Is this infer info trivial? True if d_conc is true. */
+ bool isTrivial() const;
/**
- * The pending phase requirements, see InferenceManager::sendPhaseRequirement.
- */
- std::map<Node, bool> d_pending_phase;
- /**
- * The index in the normal forms under which this inference is addressing.
- * For example, if the inference is inferring x = y from |x|=|y| and
- * w ++ x ++ ... = w ++ y ++ ...
- * then d_index is 1, since x and y are at index 1 in these concat terms.
+ * Does this infer info correspond to a conflict? True if d_conc is false
+ * and it has no new antecedants (d_antn).
*/
- unsigned d_index;
+ bool isConflict() const;
/**
- * The normal form pair that is cached as a result of this inference.
- */
- Node d_nf_pair[2];
- /** for debugging
- *
- * The base pair of strings d_i/d_j that led to the inference, and whether
- * (d_rev) we were processing the normal forms of these strings in reverse
- * direction.
+ * Does this infer info correspond to a "fact"? A fact is an inference whose
+ * conclusion should be added as an equality or predicate to the equality
+ * engine with no new external antecedants (d_antn).
*/
- Node d_i;
- Node d_j;
- bool d_rev;
+ bool isFact() const;
};
+/**
+ * Writes an inference info to a stream.
+ *
+ * @param out The stream to write to
+ * @param ii The inference info to write to the stream
+ * @return The stream
+ */
+std::ostream& operator<<(std::ostream& out, const InferInfo& ii);
+
} // namespace strings
} // namespace theory
} // namespace CVC4
}
void InferenceManager::sendInference(const std::vector<Node>& exp,
- const std::vector<Node>& exp_n,
+ const std::vector<Node>& expn,
Node eq,
Inference infer,
bool asLemma)
{
eq = eq.isNull() ? d_false : Rewriter::rewrite(eq);
- if (Trace.isOn("strings-infer-debug"))
- {
- Trace("strings-infer-debug")
- << "By " << infer << ", infer : " << eq << " from: " << std::endl;
- for (unsigned i = 0; i < exp.size(); i++)
- {
- Trace("strings-infer-debug") << " " << exp[i] << std::endl;
- }
- for (unsigned i = 0; i < exp_n.size(); i++)
- {
- Trace("strings-infer-debug") << " N:" << exp_n[i] << std::endl;
- }
- }
if (eq == d_true)
{
return;
}
- // only keep stats if not trivial conclusion
- d_statistics.d_inferences << infer;
- Node atom = eq.getKind() == NOT ? eq[0] : eq;
- // check if we should send a lemma or an inference
- if (asLemma || atom == d_false || atom.getKind() == OR || !exp_n.empty()
- || options::stringInferAsLemmas())
+ // wrap in infer info and send below
+ InferInfo ii;
+ ii.d_id = infer;
+ ii.d_conc = eq;
+ ii.d_ant = exp;
+ ii.d_antn = expn;
+ sendInference(ii, asLemma);
+}
+
+void InferenceManager::sendInference(const std::vector<Node>& exp,
+ Node eq,
+ Inference infer,
+ bool asLemma)
+{
+ std::vector<Node> expn;
+ sendInference(exp, expn, eq, infer, asLemma);
+}
+
+void InferenceManager::sendInference(const InferInfo& ii, bool asLemma)
+{
+ Assert(!ii.isTrivial());
+ Trace("strings-infer-debug")
+ << "sendInference: " << ii << ", asLemma = " << asLemma << std::endl;
+ // check if we should send a conflict, lemma or a fact
+ if (asLemma || options::stringInferAsLemmas() || !ii.isFact())
{
- Node eq_exp;
- if (options::stringRExplainLemmas())
+ if (ii.isConflict())
{
- eq_exp = mkExplain(exp, exp_n);
- }
- else
- {
- if (exp.empty())
- {
- eq_exp = utils::mkAnd(exp_n);
- }
- else if (exp_n.empty())
- {
- eq_exp = utils::mkAnd(exp);
- }
- else
- {
- std::vector<Node> ev;
- ev.insert(ev.end(), exp.begin(), exp.end());
- ev.insert(ev.end(), exp_n.begin(), exp_n.end());
- eq_exp = NodeManager::currentNM()->mkNode(AND, ev);
- }
- }
- // if we have unexplained literals, this lemma is not a conflict
- if (eq == d_false && !exp_n.empty())
- {
- eq = eq_exp.negate();
- eq_exp = d_true;
+ Trace("strings-infer-debug") << "...as conflict" << std::endl;
+ Trace("strings-lemma") << "Strings::Conflict: " << ii.d_ant << " by "
+ << ii.d_id << std::endl;
+ Trace("strings-conflict") << "CONFLICT: inference conflict " << ii.d_ant
+ << " by " << ii.d_id << std::endl;
+ // we must fully explain it
+ Node conf = mkExplain(ii.d_ant);
+ Trace("strings-assert") << "(assert (not " << conf << ")) ; conflict "
+ << ii.d_id << std::endl;
+ ++(d_statistics.d_conflictsInfer);
+ // only keep stats if we process it here
+ d_statistics.d_inferences << ii.d_id;
+ d_out.conflict(conf);
+ d_state.setConflict();
+ return;
}
- sendLemma(eq_exp, eq, infer);
+ Trace("strings-infer-debug") << "...as lemma" << std::endl;
+ d_pendingLem.push_back(ii);
return;
}
- Node eqExp = utils::mkAnd(exp);
if (options::stringInferSym())
{
std::vector<Node> vars;
std::vector<Node> subs;
std::vector<Node> unproc;
- d_termReg.inferSubstitutionProxyVars(eqExp, vars, subs, unproc);
+ for (const Node& ac : ii.d_ant)
+ {
+ d_termReg.inferSubstitutionProxyVars(ac, vars, subs, unproc);
+ }
if (unproc.empty())
{
- Node eqs =
- eq.substitute(vars.begin(), vars.end(), subs.begin(), subs.end());
+ Node eqs = ii.d_conc.substitute(
+ vars.begin(), vars.end(), subs.begin(), subs.end());
+ InferInfo iiSubsLem;
+ // keep the same id for now, since we are transforming the form of the
+ // inference, not the root reason.
+ iiSubsLem.d_id = ii.d_id;
+ iiSubsLem.d_conc = eqs;
if (Trace.isOn("strings-lemma-debug"))
{
- Trace("strings-lemma-debug") << "Strings::Infer " << eq << " from "
- << eqExp << " by " << infer << std::endl;
+ Trace("strings-lemma-debug")
+ << "Strings::Infer " << iiSubsLem << std::endl;
Trace("strings-lemma-debug")
<< "Strings::Infer Alternate : " << eqs << std::endl;
for (unsigned i = 0, nvars = vars.size(); i < nvars; i++)
<< " " << vars[i] << " -> " << subs[i] << std::endl;
}
}
- sendLemma(d_true, eqs, infer);
+ Trace("strings-infer-debug") << "...as symbolic lemma" << std::endl;
+ d_pendingLem.push_back(iiSubsLem);
return;
}
if (Trace.isOn("strings-lemma-debug"))
}
}
}
- Trace("strings-lemma") << "Strings::Infer " << eq << " from " << eqExp
- << " by " << infer << std::endl;
- Trace("strings-assert") << "(assert (=> " << eqExp << " " << eq
- << ")) ; infer " << infer << std::endl;
- d_pending.push_back(PendingInfer(infer, eq, eqExp));
-}
-
-void InferenceManager::sendInference(const std::vector<Node>& exp,
- Node eq,
- Inference infer,
- bool asLemma)
-{
- std::vector<Node> exp_n;
- sendInference(exp, exp_n, eq, infer, asLemma);
-}
-
-void InferenceManager::sendInference(const InferInfo& i)
-{
- sendInference(i.d_ant, i.d_antn, i.d_conc, i.d_id, true);
-}
-
-void InferenceManager::sendLemma(Node ant, Node conc, Inference infer)
-{
- if (conc.isNull() || conc == d_false)
- {
- Trace("strings-conflict")
- << "Strings::Conflict : " << infer << " : " << ant << std::endl;
- Trace("strings-lemma") << "Strings::Conflict : " << infer << " : " << ant
- << std::endl;
- Trace("strings-assert")
- << "(assert (not " << ant << ")) ; conflict " << infer << std::endl;
- ++(d_statistics.d_conflictsInfer);
- d_out.conflict(ant);
- d_state.setConflict();
- return;
- }
- Node lem;
- if (ant == d_true)
- {
- lem = conc;
- }
- else
- {
- lem = NodeManager::currentNM()->mkNode(IMPLIES, ant, conc);
- }
- Trace("strings-lemma") << "Strings::Lemma " << infer << " : " << lem
- << std::endl;
- Trace("strings-assert") << "(assert " << lem << ") ; lemma " << infer
- << std::endl;
- d_pendingLem.push_back(lem);
+ Trace("strings-infer-debug") << "...as fact" << std::endl;
+ // add to pending, to be processed as a fact
+ d_pending.push_back(ii);
}
-
bool InferenceManager::sendSplit(Node a, Node b, Inference infer, bool preq)
{
Node eq = a.eqNode(b);
{
return false;
}
- // update statistics
- d_statistics.d_inferences << infer;
NodeManager* nm = NodeManager::currentNM();
- Node lemma_or = nm->mkNode(OR, eq, nm->mkNode(NOT, eq));
- Trace("strings-lemma") << "Strings::Lemma " << infer
- << " SPLIT : " << lemma_or << std::endl;
- d_pendingLem.push_back(lemma_or);
+ InferInfo iiSplit;
+ iiSplit.d_id = infer;
+ iiSplit.d_conc = nm->mkNode(OR, eq, nm->mkNode(NOT, eq));
sendPhaseRequirement(eq, preq);
+ d_pendingLem.push_back(iiSplit);
return true;
}
size_t i = 0;
while (!d_state.isInConflict() && i < d_pending.size())
{
- Node fact = d_pending[i].d_fact;
- Node exp = d_pending[i].d_exp;
- if (fact.getKind() == AND)
- {
- for (const Node& lit : fact)
- {
- bool polarity = lit.getKind() != NOT;
- TNode atom = polarity ? lit : lit[0];
- assertPendingFact(atom, polarity, exp);
- }
- }
- else
- {
- bool polarity = fact.getKind() != NOT;
- TNode atom = polarity ? fact : fact[0];
- assertPendingFact(atom, polarity, exp);
- }
+ InferInfo& ii = d_pending[i];
+ // At this point, ii should be a "fact", i.e. something whose conclusion
+ // should be added as a normal equality or predicate to the equality engine
+ // with no new external assumptions (ii.d_antn).
+ Assert(ii.isFact());
+ Node fact = ii.d_conc;
+ Node exp = utils::mkAnd(ii.d_ant);
+ Trace("strings-assert") << "(assert (=> " << exp << " " << fact
+ << ")) ; fact " << ii.d_id << std::endl;
+ Trace("strings-lemma") << "Strings::Fact: " << fact << " from " << exp
+ << " by " << ii.d_id << std::endl;
+ // only keep stats if we process it here
+ d_statistics.d_inferences << ii.d_id;
+ // assert it as a pending fact
+ bool polarity = fact.getKind() != NOT;
+ TNode atom = polarity ? fact : fact[0];
+ // no double negation or double (conjunctive) conclusions
+ Assert(atom.getKind() != NOT && atom.getKind() != AND);
+ assertPendingFact(atom, polarity, exp);
// Must reference count the equality and its explanation, which is not done
// by the equality engine. Notice that we do not need to do this for
// external assertions, which enter as facts through sendAssumption.
void InferenceManager::doPendingLemmas()
{
- if (!d_state.isInConflict())
+ if (d_state.isInConflict())
+ {
+ // just clear the pending vectors, nothing else to do
+ d_pendingLem.clear();
+ d_pendingReqPhase.clear();
+ return;
+ }
+ NodeManager* nm = NodeManager::currentNM();
+ for (unsigned i = 0, psize = d_pendingLem.size(); i < psize; i++)
{
- for (const Node& lc : d_pendingLem)
+ InferInfo& ii = d_pendingLem[i];
+ Assert(!ii.isTrivial());
+ Assert(!ii.isConflict());
+ // get the explanation
+ Node eqExp;
+ if (options::stringRExplainLemmas())
+ {
+ eqExp = mkExplain(ii.d_ant, ii.d_antn);
+ }
+ else
+ {
+ std::vector<Node> ev;
+ ev.insert(ev.end(), ii.d_ant.begin(), ii.d_ant.end());
+ ev.insert(ev.end(), ii.d_antn.begin(), ii.d_antn.end());
+ eqExp = utils::mkAnd(ev);
+ }
+ // make the lemma node
+ Node lem = ii.d_conc;
+ if (eqExp != d_true)
{
- Trace("strings-pending") << "Process pending lemma : " << lc << std::endl;
- ++(d_statistics.d_lemmasInfer);
- d_out.lemma(lc);
+ lem = nm->mkNode(IMPLIES, eqExp, lem);
}
- for (const std::pair<const Node, bool>& prp : d_pendingReqPhase)
+ Trace("strings-pending") << "Process pending lemma : " << lem << std::endl;
+ Trace("strings-assert")
+ << "(assert " << lem << ") ; lemma " << ii.d_id << std::endl;
+ Trace("strings-lemma") << "Strings::Lemma: " << lem << " by " << ii.d_id
+ << std::endl;
+ // only keep stats if we process it here
+ d_statistics.d_inferences << ii.d_id;
+ ++(d_statistics.d_lemmasInfer);
+ d_out.lemma(lem);
+
+ // Process the side effects of the inference info.
+ // Register the new skolems from this inference. We register them here
+ // (lazily), since this is the moment when we have decided to process the
+ // inference.
+ for (const std::pair<const LengthStatus, std::vector<Node> >& sks :
+ ii.d_new_skolem)
{
- Trace("strings-pending") << "Require phase : " << prp.first
- << ", polarity = " << prp.second << std::endl;
- d_out.requirePhase(prp.first, prp.second);
+ for (const Node& n : sks.second)
+ {
+ d_termReg.registerTermAtomic(n, sks.first);
+ }
}
}
+ // process the pending require phase calls
+ for (const std::pair<const Node, bool>& prp : d_pendingReqPhase)
+ {
+ Trace("strings-pending") << "Require phase : " << prp.first
+ << ", polarity = " << prp.second << std::endl;
+ d_out.requirePhase(prp.first, prp.second);
+ }
d_pendingLem.clear();
d_pendingReqPhase.clear();
}
namespace theory {
namespace strings {
-/**
- * A pending inference. This is a helper class to track an unprocessed call to
- * InferenceManager::sendInference that is waiting to be asserted as a fact to
- * the equality engine.
- */
-struct PendingInfer
-{
- PendingInfer(Inference i, Node fact, Node exp)
- : d_infer(i), d_fact(fact), d_exp(exp)
- {
- }
- ~PendingInfer() {}
- /** The inference identifier */
- Inference d_infer;
- /** The conclusion */
- Node d_fact;
- /**
- * Its explanation. This is a conjunction of literals that hold in the
- * equality engine in the current context.
- */
- Node d_exp;
-};
-
/** Inference Manager
*
* The purpose of this class is to process inference steps for strategies
* equality engine of the theory of strings. On the other hand, the set
* exp_n ("explanations new") contain nodes that are not explainable by the
* theory of strings. This method may call sendLemma or otherwise add a
- * PendingInfer to d_pending, indicating a fact should be asserted to the
+ * InferInfo to d_pending, indicating a fact should be asserted to the
* equality engine. Overall, the result of this method is one of the
* following:
*
* of each type.
*
* If the flag asLemma is true, then this method will send a lemma instead
- * of an inference whenever applicable.
+ * of a fact whenever applicable.
*/
void sendInference(const std::vector<Node>& exp,
const std::vector<Node>& exp_n,
/** Send inference
*
- * Makes the appropriate call to send inference based on the infer info
- * data structure (see sendInference documentation above).
+ * This is the central method for sending inferences; the variants above
+ * construct an InferInfo from their arguments and call this method.
+ *
+ * The inference info ii should have a rewritten conclusion and should not be
+ * trivial (InferInfo::isTrivial). It is the responsibility of the caller to
+ * ensure this.
+ *
+ * If the flag asLemma is true, then this method will send a lemma instead
+ * of a fact whenever applicable.
*/
- void sendInference(const InferInfo& i);
+ void sendInference(const InferInfo& ii, bool asLemma = false);
/** Send split
*
* This requests that ( a = b V a != b ) is sent on the output channel as a
*
* This method is called to indicate this class should send a phase
* requirement request to the output channel for literal lit to be
- * decided with polarity pol.
+ * decided with polarity pol. This requirement is processed at the same time
+ * lemmas are sent on the output channel of this class during this call to
+ * check. This means if the current lemmas of this class are abandoned (due
+ * to a conflict), the phase requirement is not processed.
*/
void sendPhaseRequirement(Node lit, bool pol);
/**
* of atom, including calls to registerTerm.
*/
void assertPendingFact(Node atom, bool polarity, Node exp);
- /**
- * Indicates that ant => conc should be sent on the output channel of this
- * class. This will either trigger an immediate call to the conflict
- * method of the output channel of this class of conc is false, or adds the
- * above lemma to the lemma cache d_pending_lem, which may be flushed
- * later within the current call to TheoryStrings::check.
- *
- * The argument infer identifies the reason for inference, used for
- * debugging.
- */
- void sendLemma(Node ant, Node conc, Inference infer);
/** Reference to the solver state of the theory of strings. */
SolverState& d_state;
/** Reference to the term registry of theory of strings */
* The list of pending literals to assert to the equality engine along with
* their explanation.
*/
- std::vector<PendingInfer> d_pending;
+ std::vector<InferInfo> d_pending;
/** A map from literals to their pending phase requirement */
std::map<Node, bool> d_pendingReqPhase;
/** A list of pending lemmas to be sent on the output channel. */
- std::vector<Node> d_pendingLem;
+ std::vector<InferInfo> d_pendingLem;
/**
* The keep set of this class. This set is maintained to ensure that
* facts and their explanations are ref-counted. Since facts and their