Also updates its inference manager so that it no longer tracks its own statistics, since the standard ones are now used.
This also sets up some dependencies in the sygus extension that will be used to implement InferenceId for the sygus extension, to be done in a separate PR.
TheoryState& state,
ProofNodeManager* pnm)
: InferenceManagerBuffered(t, state, pnm, "theory::datatypes"),
- d_inferenceLemmas("theory::datatypes::inferenceLemmas"),
- d_inferenceFacts("theory::datatypes::inferenceFacts"),
- d_inferenceConflicts("theory::datatypes::inferenceConflicts"),
d_pnm(pnm),
d_ipc(pnm == nullptr ? nullptr
: new InferProofCons(state.getSatContext(), pnm)),
pnm, state.getUserContext(), "datatypes::lemPg"))
{
d_false = NodeManager::currentNM()->mkConst(false);
- smtStatisticsRegistry()->registerStat(&d_inferenceLemmas);
- smtStatisticsRegistry()->registerStat(&d_inferenceFacts);
- smtStatisticsRegistry()->registerStat(&d_inferenceConflicts);
}
InferenceManager::~InferenceManager()
{
- smtStatisticsRegistry()->unregisterStat(&d_inferenceLemmas);
- smtStatisticsRegistry()->unregisterStat(&d_inferenceFacts);
- smtStatisticsRegistry()->unregisterStat(&d_inferenceConflicts);
}
void InferenceManager::addPendingInference(Node conc,
+ InferenceId id,
Node exp,
- bool forceLemma,
- InferenceId i)
+ bool forceLemma)
{
// if we are forcing the inference to be processed as a lemma, or if the
// inference must be sent as a lemma based on the policy in
// mustCommunicateFact.
if (forceLemma || DatatypesInference::mustCommunicateFact(conc, exp))
{
- d_pendingLem.emplace_back(new DatatypesInference(this, conc, exp, i));
+ d_pendingLem.emplace_back(new DatatypesInference(this, conc, exp, id));
}
else
{
- d_pendingFact.emplace_back(new DatatypesInference(this, conc, exp, i));
+ d_pendingFact.emplace_back(new DatatypesInference(this, conc, exp, id));
}
}
return;
}
// otherwise send as a normal lemma
- if (lemma(lem, id, p))
- {
- d_inferenceLemmas << id;
- }
+ lemma(lem, id, p);
}
void InferenceManager::sendDtConflict(const std::vector<Node>& conf, InferenceId id)
prepareDtInference(d_false, exp, id, d_ipc.get());
}
conflictExp(id, conf, d_ipc.get());
- d_inferenceConflicts << id;
}
-bool InferenceManager::sendLemmas(const std::vector<Node>& lemmas)
+bool InferenceManager::sendLemmas(const std::vector<Node>& lemmas,
+ InferenceId id)
{
bool ret = false;
for (const Node& lem : lemmas)
{
- if (lemma(lem, InferenceId::UNKNOWN))
+ if (lemma(lem, id))
{
ret = true;
}
}
d_lemPg->setProofFor(lem, pn);
}
- // use trusted lemma
return TrustNode::mkTrustLemma(lem, d_lemPg.get());
}
#include "theory/datatypes/infer_proof_cons.h"
#include "theory/datatypes/inference.h"
#include "theory/inference_manager_buffered.h"
-#include "util/statistics_registry.h"
namespace CVC4 {
namespace theory {
*
* @param conc The conclusion of the inference
* @param exp The explanation of the inference
+ * @param id The inference, used for stats and as a hint for constructing
+ * the proof of (exp => conc)
* @param forceLemma Whether this inference *must* be processed as a lemma.
* Otherwise, it may be processed as a fact or lemma based on
* mustCommunicateFact.
- * @param i The inference, used for stats and as a hint for constructing
- * the proof of (conc => exp)
*/
void addPendingInference(Node conc,
+ InferenceId id,
Node exp,
- bool forceLemma = false,
- InferenceId i = InferenceId::UNKNOWN);
+ bool forceLemma = false);
/**
* Process the current lemmas and facts. This is a custom method that can
* be seen as overriding the behavior of calling both doPendingLemmas and
* Send lemma immediately on the output channel
*/
void sendDtLemma(Node lem,
- InferenceId i = InferenceId::UNKNOWN,
+ InferenceId id,
LemmaProperty p = LemmaProperty::NONE);
/**
* Send conflict immediately on the output channel
*/
- void sendDtConflict(const std::vector<Node>& conf, InferenceId i = InferenceId::UNKNOWN);
+ void sendDtConflict(const std::vector<Node>& conf, InferenceId id);
/**
* Send lemmas with property NONE on the output channel immediately.
* Returns true if any lemma was sent.
*/
- bool sendLemmas(const std::vector<Node>& lemmas);
+ bool sendLemmas(const std::vector<Node>& lemmas, InferenceId id);
private:
/** Are proofs enabled? */
Node prepareDtInference(Node conc, Node exp, InferenceId id, InferProofCons* ipc);
/** The false node */
Node d_false;
- /**
- * Counts the number of applications of each type of inference processed by
- * the above method as facts, lemmas and conflicts.
- */
- HistogramStat<InferenceId> d_inferenceLemmas;
- HistogramStat<InferenceId> d_inferenceFacts;
- HistogramStat<InferenceId> d_inferenceConflicts;
/** Pointer to the proof node manager */
ProofNodeManager* d_pnm;
/** The inference to proof converter */
#include "options/quantifiers_options.h"
#include "printer/printer.h"
#include "theory/datatypes/sygus_datatype_utils.h"
-#include "theory/datatypes/theory_datatypes.h"
#include "theory/datatypes/theory_datatypes_utils.h"
#include "theory/quantifiers/sygus/sygus_explain.h"
#include "theory/quantifiers/sygus/term_database_sygus.h"
#include "theory/quantifiers/term_util.h"
-#include "theory/quantifiers_engine.h"
#include "theory/theory_model.h"
using namespace CVC4;
using namespace CVC4::theory;
using namespace CVC4::theory::datatypes;
-SygusExtension::SygusExtension(TheoryDatatypes* td,
- QuantifiersEngine* qe,
- context::Context* c)
- : d_td(td),
- d_tds(qe->getTermDatabaseSygus()),
- d_ssb(qe),
- d_testers(c),
- d_testers_exp(c),
- d_active_terms(c),
- d_currTermSize(c)
+SygusExtension::SygusExtension(TheoryState& s,
+ InferenceManager& im,
+ quantifiers::TermDbSygus* tds,
+ DecisionManager* dm)
+ : d_state(s),
+ d_im(im),
+ d_tds(tds),
+ d_dm(dm),
+ d_ssb(tds),
+ d_testers(s.getSatContext()),
+ d_testers_exp(s.getSatContext()),
+ d_active_terms(s.getSatContext()),
+ d_currTermSize(s.getSatContext())
{
d_zero = NodeManager::currentNM()->mkConst(Rational(0));
d_true = NodeManager::currentNM()->mkConst(true);
d_anchor_to_ag_strategy[e].reset(
new DecisionStrategySingleton("sygus_enum_active",
ag,
- d_td->getSatContext(),
- d_td->getValuation()));
+ d_state.getSatContext(),
+ d_state.getValuation()));
}
- d_td->getDecisionManager()->registerStrategy(
- DecisionManager::STRAT_DT_SYGUS_ENUM_ACTIVE,
- d_anchor_to_ag_strategy[e].get());
+ d_dm->registerStrategy(DecisionManager::STRAT_DT_SYGUS_ENUM_ACTIVE,
+ d_anchor_to_ag_strategy[e].get());
}
Node m;
if (!ag.isNull())
if( it==d_szinfo.end() ){
Trace("sygus-sb") << "Sygus : register measure term : " << m << std::endl;
d_szinfo[m].reset(new SygusSizeDecisionStrategy(
- m, d_td->getSatContext(), d_td->getValuation()));
+ m, d_state.getSatContext(), d_state.getValuation()));
// register this as a decision strategy
- d_td->getDecisionManager()->registerStrategy(
- DecisionManager::STRAT_DT_SYGUS_ENUM_SIZE, d_szinfo[m].get());
+ d_dm->registerStrategy(DecisionManager::STRAT_DT_SYGUS_ENUM_SIZE,
+ d_szinfo[m].get());
}
}
Trace("dt-sygus-debug") << "Checking model value of " << prog << "..."
<< std::endl;
Assert(prog.getType().isDatatype());
- Node progv = d_td->getValuation().getModel()->getValue( prog );
+ Node progv = d_state.getValuation().getModel()->getValue(prog);
if (Trace.isOn("dt-sygus"))
{
Trace("dt-sygus") << "* DT model : " << prog << " -> ";
if (options::sygusFair() == options::SygusFairMode::DT_SIZE)
{
Node prog_sz = NodeManager::currentNM()->mkNode( kind::DT_SIZE, prog );
- Node prog_szv = d_td->getValuation().getModel()->getValue( prog_sz );
+ Node prog_szv = d_state.getValuation().getModel()->getValue(prog_sz);
Node progv_sz = NodeManager::currentNM()->mkNode( kind::DT_SIZE, progv );
Trace("sygus-sb") << " Mv[" << prog << "] = " << progv << ", size = " << prog_szv << std::endl;
if (Trace.isOn("sygus-sb-check-value"))
{
Node prog_sz = nm->mkNode(DT_SIZE, n);
- Node prog_szv = d_td->getValuation().getModel()->getValue( prog_sz );
+ Node prog_szv = d_state.getValuation().getModel()->getValue(prog_sz);
for( int i=0; i<ind; i++ ){
Trace("sygus-sb-check-value") << " ";
}
// ensure that the expected size bound is met
int cindex = utils::indexOf(vn.getOperator());
Node tst = utils::mkTester(n, cindex, dt);
- bool hastst = d_td->getEqualityEngine()->hasTerm(tst);
+ bool hastst = d_state.getEqualityEngine()->hasTerm(tst);
Node tstrep;
if (hastst)
{
- tstrep = d_td->getEqualityEngine()->getRepresentative(tst);
+ tstrep = d_state.getEqualityEngine()->getRepresentative(tst);
}
if (!hastst || tstrep != d_true)
{
int SygusExtension::getGuardStatus( Node g ) {
bool value;
- if( d_td->getValuation().hasSatValue( g, value ) ) {
+ if (d_state.getValuation().hasSatValue(g, value))
+ {
if( value ){
return 1;
}else{
#include "expr/dtype.h"
#include "expr/node.h"
#include "theory/datatypes/sygus_simple_sym.h"
+#include "theory/decision_manager.h"
#include "theory/quantifiers/sygus/sygus_explain.h"
#include "theory/quantifiers/sygus/synth_conjecture.h"
#include "theory/quantifiers/sygus_sampler.h"
namespace theory {
namespace datatypes {
-class TheoryDatatypes;
+class InferenceManager;
/**
* This is the sygus extension of the decision procedure for quantifier-free
typedef context::CDHashSet<Node, NodeHashFunction> NodeSet;
public:
- SygusExtension(TheoryDatatypes* td,
- QuantifiersEngine* qe,
- context::Context* c);
+ SygusExtension(TheoryState& s,
+ InferenceManager& im,
+ quantifiers::TermDbSygus* tds,
+ DecisionManager* dm);
~SygusExtension();
/**
* Notify this class that tester for constructor tindex has been asserted for
*/
void check(std::vector<Node>& lemmas);
private:
- /** Pointer to the datatype theory that owns this class. */
- TheoryDatatypes* d_td;
+ /** The theory state of the datatype theory */
+ TheoryState& d_state;
+ /** The inference manager of the datatype theory */
+ InferenceManager& d_im;
/** Pointer to the sygus term database */
quantifiers::TermDbSygus* d_tds;
+ /** Pointer to the decision manager */
+ DecisionManager* d_dm;
/** the simple symmetry breaking utility */
SygusSimpleSymBreak d_ssb;
/**
#include "theory/datatypes/sygus_simple_sym.h"
#include "theory/quantifiers/term_util.h"
-#include "theory/quantifiers_engine.h"
using namespace std;
using namespace CVC4::kind;
namespace theory {
namespace datatypes {
-SygusSimpleSymBreak::SygusSimpleSymBreak(QuantifiersEngine* qe)
- : d_tds(qe->getTermDatabaseSygus())
+SygusSimpleSymBreak::SygusSimpleSymBreak(quantifiers::TermDbSygus* tds)
+ : d_tds(tds)
{
}
class SygusSimpleSymBreak
{
public:
- SygusSimpleSymBreak(QuantifiersEngine* qe);
+ SygusSimpleSymBreak(quantifiers::TermDbSygus* tds);
~SygusSimpleSymBreak() {}
/** consider argument kind
*
// this is not done.
if (getQuantifiersEngine() && options::sygus())
{
+ quantifiers::TermDbSygus* tds =
+ getQuantifiersEngine()->getTermDatabaseSygus();
d_sygusExtension.reset(
- new SygusExtension(this, getQuantifiersEngine(), getSatContext()));
+ new SygusExtension(d_state, d_im, tds, getDecisionManager()));
// do congruence on evaluation functions
d_equalityEngine->addFunctionKind(kind::DT_SYGUS_EVAL);
}
Assert(d_sygusExtension != nullptr);
std::vector<Node> lemmas;
d_sygusExtension->check(lemmas);
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
return;
}
else if (level == EFFORT_FULL && !d_state.isInConflict()
assumptions.push_back(assumption);
Node lemma = assumptions.size()==1 ? assumptions[0] : NodeManager::currentNM()->mkNode( OR, assumptions );
Trace("dt-singleton") << "*************Singleton equality lemma " << lemma << std::endl;
- d_im.lemma(lemma, InferenceId::UNKNOWN);
+ d_im.lemma(lemma, InferenceId::DATATYPES_REC_SINGLETON_EQ);
}
}
}else{
//this may not be necessary?
//if only one constructor, then this term must be this constructor
Node t = utils::mkTester(n, 0, dt);
- d_im.addPendingInference(t, d_true, false, InferenceId::DATATYPES_SPLIT);
+ d_im.addPendingInference(
+ t, InferenceId::DATATYPES_SPLIT, d_true);
Trace("datatypes-infer") << "DtInfer : 1-cons (full) : " << t << std::endl;
}else{
Assert(consIndex != -1 || dt.isSygus());
NodeBuilder<> nb(kind::OR);
nb << test << test.notNode();
Node lemma = nb;
- d_im.lemma(lemma, InferenceId::UNKNOWN);
+ d_im.lemma(lemma, InferenceId::DATATYPES_BINARY_SPLIT);
d_out->requirePhase( test, true );
}else{
Trace("dt-split") << "*************Split for constructors on " << n << endl;
{
std::vector< Node > lemmas;
d_sygusExtension->assertFact(atom, polarity, lemmas);
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
}
//add to tester if applicable
Node t_arg;
std::vector< Node > lemmas;
d_sygusExtension->assertTester(tindex, t_arg, atom, lemmas);
Trace("dt-tester") << "Done assert tester to sygus." << std::endl;
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
}
}
}else{
{
std::vector< Node > lemmas;
d_sygusExtension->preRegisterTerm(n, lemmas);
- d_im.sendLemmas(lemmas);
+ d_im.sendLemmas(lemmas, InferenceId::UNKNOWN);
}
break;
}
for( int i=0; i<(int)cons1.getNumChildren(); i++ ) {
if( !areEqual( cons1[i], cons2[i] ) ){
Node eq = cons1[i].eqNode( cons2[i] );
- d_im.addPendingInference(eq, unifEq, false, InferenceId::DATATYPES_UNIF);
+ d_im.addPendingInference(
+ eq, InferenceId::DATATYPES_UNIF, unifEq);
Trace("datatypes-infer") << "DtInfer : cons-inj : " << eq << " by " << unifEq << std::endl;
}
}
d_term_sk[n] = k;
Node eq = k.eqNode( n );
Trace("datatypes-infer") << "DtInfer : ref : " << eq << std::endl;
- d_im.addPendingLemma(eq, InferenceId::UNKNOWN);
+ d_im.addPendingLemma(eq, InferenceId::DATATYPES_PURIFY);
return k;
}else{
return (*it).second;
: utils::mkTester(t_arg, testerIndex, dt);
Node t_concl_exp = ( nb.getNumChildren() == 1 ) ? nb.getChild( 0 ) : nb;
d_im.addPendingInference(
- t_concl, t_concl_exp, false, InferenceId::DATATYPES_LABEL_EXH);
+ t_concl, InferenceId::DATATYPES_LABEL_EXH, t_concl_exp);
Trace("datatypes-infer") << "DtInfer : label : " << t_concl << " by " << t_concl_exp << std::endl;
return;
}
bool forceLemma = !s.getType().isDatatype();
Trace("datatypes-infer") << "DtInfer : collapse sel";
Trace("datatypes-infer") << " : " << eq << " by " << eq_exp << std::endl;
- d_im.addPendingInference(eq, eq_exp, forceLemma, InferenceId::DATATYPES_COLLAPSE_SEL);
+ d_im.addPendingInference(
+ eq, InferenceId::DATATYPES_COLLAPSE_SEL, eq_exp, forceLemma);
}
}
}
Node v2 = NodeManager::currentNM()->mkSkolem( "k2", tn );
a = v1.eqNode( v2 ).negate();
//send out immediately as lemma
- d_im.lemma(a, InferenceId::UNKNOWN);
+ d_im.lemma(a, InferenceId::DATATYPES_REC_SINGLETON_FORCE_DEQ);
Trace("dt-singleton") << "******** assert " << a << " to avoid singleton cardinality for type " << tn << std::endl;
}
d_singleton_lemma[index][tn] = a;
Node lem = nm->mkNode(LEQ, d_zero, n);
Trace("datatypes-infer")
<< "DtInfer : size geq zero : " << lem << std::endl;
- d_im.addPendingLemma(lem, InferenceId::UNKNOWN);
+ d_im.addPendingLemma(lem, InferenceId::DATATYPES_SIZE_POS);
}
else if (nk == DT_HEIGHT_BOUND && n[1].getConst<Rational>().isZero())
{
: nm->mkNode(OR, children));
}
Trace("datatypes-infer") << "DtInfer : zero height : " << lem << std::endl;
- d_im.addPendingLemma(lem, InferenceId::UNKNOWN);
+ d_im.addPendingLemma(lem, InferenceId::DATATYPES_HEIGHT_ZERO);
}
}
}
Trace("datatypes-infer-debug") << "DtInstantiate : " << eqc << " " << eq
<< " forceLemma = " << forceLemma << std::endl;
- d_im.addPendingInference(eq, exp, forceLemma, InferenceId::DATATYPES_INST);
+ d_im.addPendingInference(eq, InferenceId::DATATYPES_INST, exp, forceLemma);
Trace("datatypes-infer") << "DtInfer : instantiate : " << eq << " by " << exp
<< std::endl;
}
Trace("dt-cdt") << std::endl;
Node eq = part_out[i][0].eqNode( part_out[i][j] );
Node eqExp = NodeManager::currentNM()->mkAnd(exp);
- d_im.addPendingInference(eq, eqExp, false, InferenceId::DATATYPES_BISIMILAR);
+ d_im.addPendingInference(eq, InferenceId::DATATYPES_BISIMILAR, eqExp);
Trace("datatypes-infer") << "DtInfer : cdt-bisimilar : " << eq << " by " << eqExp << std::endl;
}
}
case InferenceId::BAG_DIFFERENCE_REMOVE: return "BAG_DIFFERENCE_REMOVE";
case InferenceId::BAG_DUPLICATE_REMOVAL: return "BAG_DUPLICATE_REMOVAL";
- case InferenceId::DATATYPES_UNIF: return "UNIF";
- case InferenceId::DATATYPES_INST: return "INST";
- case InferenceId::DATATYPES_SPLIT: return "SPLIT";
- case InferenceId::DATATYPES_LABEL_EXH: return "LABEL_EXH";
- case InferenceId::DATATYPES_COLLAPSE_SEL: return "COLLAPSE_SEL";
- case InferenceId::DATATYPES_CLASH_CONFLICT: return "CLASH_CONFLICT";
- case InferenceId::DATATYPES_TESTER_CONFLICT: return "TESTER_CONFLICT";
- case InferenceId::DATATYPES_TESTER_MERGE_CONFLICT: return "TESTER_MERGE_CONFLICT";
- case InferenceId::DATATYPES_BISIMILAR: return "BISIMILAR";
- case InferenceId::DATATYPES_CYCLE: return "CYCLE";
+ case InferenceId::DATATYPES_PURIFY: return "DATATYPES_PURIFY";
+ case InferenceId::DATATYPES_UNIF: return "DATATYPES_UNIF";
+ case InferenceId::DATATYPES_INST: return "DATATYPES_INST";
+ case InferenceId::DATATYPES_SPLIT: return "DATATYPES_SPLIT";
+ case InferenceId::DATATYPES_BINARY_SPLIT: return "DATATYPES_BINARY_SPLIT";
+ case InferenceId::DATATYPES_LABEL_EXH: return "DATATYPES_LABEL_EXH";
+ case InferenceId::DATATYPES_COLLAPSE_SEL: return "DATATYPES_COLLAPSE_SEL";
+ case InferenceId::DATATYPES_CLASH_CONFLICT:
+ return "DATATYPES_CLASH_CONFLICT";
+ case InferenceId::DATATYPES_TESTER_CONFLICT:
+ return "DATATYPES_TESTER_CONFLICT";
+ case InferenceId::DATATYPES_TESTER_MERGE_CONFLICT:
+ return "DATATYPES_TESTER_MERGE_CONFLICT";
+ case InferenceId::DATATYPES_BISIMILAR: return "DATATYPES_BISIMILAR";
+ case InferenceId::DATATYPES_REC_SINGLETON_EQ:
+ return "DATATYPES_REC_SINGLETON_EQ";
+ case InferenceId::DATATYPES_REC_SINGLETON_FORCE_DEQ:
+ return "DATATYPES_REC_SINGLETON_FORCE_DEQ";
+ case InferenceId::DATATYPES_CYCLE: return "DATATYPES_CYCLE";
+ case InferenceId::DATATYPES_SIZE_POS: return "DATATYPES_SIZE_POS";
+ case InferenceId::DATATYPES_HEIGHT_ZERO: return "DATATYPES_HEIGHT_ZERO";
case InferenceId::SEP_PTO_NEG_PROP: return "SEP_PTO_NEG_PROP";
case InferenceId::SEP_PTO_PROP: return "SEP_PTO_PROP";
// ---------------------------------- end bitvector theory
// ---------------------------------- datatypes theory
+ // (= k t) for fresh k
+ DATATYPES_PURIFY,
// (= (C t1 ... tn) (C s1 .. sn)) => (= ti si)
DATATYPES_UNIF,
// ((_ is Ci) t) => (= t (Ci (sel_1 t) ... (sel_n t)))
DATATYPES_INST,
// (or ((_ is C1) t) V ... V ((_ is Cn) t))
DATATYPES_SPLIT,
+ // (or ((_ is Ci) t) V (not ((_ is Ci) t)))
+ DATATYPES_BINARY_SPLIT,
// (not ((_ is C1) t)) ^ ... [j] ... ^ (not ((_ is Cn) t)) => ((_ is Cj) t)
DATATYPES_LABEL_EXH,
// (= t (Ci t1 ... tn)) => (= (sel_j t) rewrite((sel_j (Ci t1 ... tn))))
DATATYPES_TESTER_MERGE_CONFLICT,
// bisimilarity for codatatypes
DATATYPES_BISIMILAR,
+ // corecursive singleton equality
+ DATATYPES_REC_SINGLETON_EQ,
+ // corecursive singleton disequality (not (= k1 k2)) for fresh k1, k2
+ DATATYPES_REC_SINGLETON_FORCE_DEQ,
// cycle conflict for datatypes
DATATYPES_CYCLE,
+ //-------------------- datatypes size/height
+ // (>= (dt.size t) 0)
+ DATATYPES_SIZE_POS,
// (=> (= (dt.height t) 0) (and (= (dt.height (sel_1 t)) 0) .... ))
+ DATATYPES_HEIGHT_ZERO,
// ---------------------------------- end datatypes theory
// ---------------------------------- sep theory