This commit adds tracking of Farkas coefficients in the theory of linear real arithmetic when
proofs are enabled. There may be some performance changes due to subtly different search paths
being taken.
Additional bug fixes:
- Polynomial::exactDivide did not satisfy the preconditions of the Monomial constructor.
  To prevent future problems, Monomials should now be made via one of the mkMonomial functions
  (see the sketch after this list).
- Fixes a bug in SumOfInfeasibilitiesSPD::greedyConflictSubsets().
  A row could be used twice in the construction of a conflict,
  which violated an assumption in the Tableau when constructing the intermediate rows.
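A minimal sketch of the intended Monomial construction (hypothetical helper; the exact
mkMonomial overloads live in theory/arith/normal_form.h and may differ from the one assumed here):

  // Sketch only: assumes a Monomial::mkMonomial(const Constant&, const VarList&) overload.
  // Unlike the raw constructor, the factory is expected to handle the degenerate
  // cases (such as a zero or one coefficient) instead of violating preconditions.
  Monomial mkProduct(const Constant& c, const VarList& vl){
    return Monomial::mkMonomial(c, vl);
  }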
Constraints:
- To enable proofs, all conflicts and propagations are designed to go through the Constraint system
  before they are converted to externally understandable conflicts and propagations in the form
  of Nodes.
- Constraints must now be given a reason for marking them as true that corresponds to a proof.
- Constraints should now be marked as being true by one of the impliedByX functions.
- Each impliedByX function has an ArithProofType associated with it.
- Each call to an impliedByX function stores a context dependent ConstraintRule object
to track the proof.
- After marking the constraint as true, the caller should either try to propagate the constraint or
  raise a conflict.
- There are no more special cases for marking a node as being true when its negation has a proof
  vs. when the negation does not have a proof. One must now explicitly pass an inConflict flag
  to the impliedByX (and similar) functions.
  For example, instead of both:
void setAssertedToTheTheory(TNode witness);
void setAssertedToTheTheoryWithNegationTrue(TNode witness);
There is just:
void setAssertedToTheTheory(TNode witness, bool inConflict);
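For a rough picture of the intended flow, here is a hedged sketch (exampleLearn is a
hypothetical helper, not part of this change; the member functions it calls are the ones
introduced above):

  void exampleLearn(ConstraintP c, const ConstraintCPVec& antecedents,
                    RationalVectorCP farkasCoeffs, const RaiseConflict& raiseConflict){
    bool nowInConflict = c->negationHasProof();
    // Record the reason (here a Farkas proof) as a context dependent ConstraintRule.
    c->impliedByFarkas(antecedents, farkasCoeffs, nowInConflict);
    if(nowInConflict){
      raiseConflict.raiseConflict(c);  // conflicts go through the Constraint system
    }else{
      c->tryToPropagate();
    }
  }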
#include "theory/arith/arithvar.h"
#include <limits>
+#include <set>
namespace CVC4 {
namespace theory {
const ArithVar ARITHVAR_SENTINEL = std::numeric_limits<ArithVar>::max();
+bool debugIsASet(const std::vector<ArithVar>& variables){
+ std::set<ArithVar> asSet(variables.begin(), variables.end());
+ return asSet.size() == variables.size();
+}
+
}/* CVC4::theory::arith namespace */
}/* CVC4::theory namespace */
}/* CVC4 namespace */
typedef std::pair<ArithVar, Rational> ArithRatPair;
typedef std::vector< ArithRatPair > ArithRatPairVec;
+extern bool debugIsASet(const ArithVarVec& variables);
+
+
}/* CVC4::theory::arith namespace */
}/* CVC4::theory namespace */
}/* CVC4 namespace */
d_ta.signal(x);
}
-RaiseConflict::RaiseConflict(TheoryArithPrivate& ta, ConstraintCPVec& buf )
+RaiseConflict::RaiseConflict(TheoryArithPrivate& ta)
: d_ta(ta)
- , d_construction(buf)
{}
+void RaiseConflict::raiseConflict(ConstraintCP c) const{
+ Assert(c->inConflict());
+ d_ta.raiseConflict(c);
+}
+
+FarkasConflictBuilder::FarkasConflictBuilder()
+ : d_farkas()
+ , d_constraints()
+ , d_consequent(NullConstraint)
+ , d_consequentSet(false)
+{
+ reset();
+}
+
+bool FarkasConflictBuilder::underConstruction() const{
+ return d_consequent != NullConstraint;
+}
+
+bool FarkasConflictBuilder::consequentIsSet() const{
+ return d_consequentSet;
+}
+
+void FarkasConflictBuilder::reset(){
+ d_consequent = NullConstraint;
+ d_constraints.clear();
+ d_consequentSet = false;
+ PROOF(d_farkas.clear());
+ Assert(!underConstruction());
+}
+
/* Adds a constraint to the constraint under construction. */
-void RaiseConflict::addConstraint(ConstraintCP c){
- d_construction.push_back(c);
+void FarkasConflictBuilder::addConstraint(ConstraintCP c, const Rational& fc){
+ Assert(!PROOF_ON() ||
+ (!underConstruction() && d_constraints.empty() && d_farkas.empty()) ||
+ (underConstruction() && d_constraints.size() + 1 == d_farkas.size()));
+ Assert(PROOF_ON() || d_farkas.empty());
+ Assert(c->isTrue());
+
+
+ if(d_consequent == NullConstraint){
+ d_consequent = c;
+ } else {
+ d_constraints.push_back(c);
+ }
+ PROOF(d_farkas.push_back(fc););
+ Assert(!PROOF_ON() || d_constraints.size() + 1 == d_farkas.size());
+ Assert(PROOF_ON() || d_farkas.empty());
}
-/* Turns the vector under construction into a conflict */
-void RaiseConflict::commitConflict(){
- Assert(!d_construction.empty());
- sendConflict(d_construction);
- d_construction.clear();
+
+void FarkasConflictBuilder::addConstraint(ConstraintCP c, const Rational& fc, const Rational& mult){
+ Assert(!mult.isZero());
+ if(PROOF_ON() && !mult.isOne()){
+ Rational prod = fc * mult;
+ addConstraint(c, prod);
+ }else{
+ addConstraint(c, fc);
+ }
}
-void RaiseConflict::sendConflict(const ConstraintCPVec& vec){
- d_ta.raiseConflict(vec);
+void FarkasConflictBuilder::makeLastConsequent(){
+ Assert(!d_consequentSet);
+ Assert(underConstruction());
+
+ if(d_constraints.empty()){
+ // no-op
+ d_consequentSet = true;
+ } else {
+ Assert(d_consequent != NullConstraint);
+ ConstraintCP last = d_constraints.back();
+ d_constraints.back() = d_consequent;
+ d_consequent = last;
+ PROOF( std::swap( d_farkas.front(), d_farkas.back() ) );
+ d_consequentSet = true;
+ }
+
+ Assert(! d_consequent->negationHasProof() );
+ Assert(d_consequentSet);
}
+/* Turns the vector under construction into a conflict */
+ConstraintCP FarkasConflictBuilder::commitConflict(){
+ Assert(underConstruction());
+ Assert(!d_constraints.empty());
+ Assert(!PROOF_ON() ||
+ (!underConstruction() && d_constraints.empty() && d_farkas.empty()) ||
+ (underConstruction() && d_constraints.size() + 1 == d_farkas.size()));
+ Assert(PROOF_ON() || d_farkas.empty());
+ Assert(d_consequentSet);
+
+ ConstraintP not_c = d_consequent->getNegation();
+ RationalVectorCP coeffs = NULLPROOF(&d_farkas);
+ not_c->impliedByFarkas(d_constraints, coeffs, true );
+
+ reset();
+ Assert(!underConstruction());
+ Assert( not_c->inConflict() );
+ Assert(!d_consequentSet);
+ return not_c;
+}
+
+RaiseEqualityEngineConflict::RaiseEqualityEngineConflict(TheoryArithPrivate& ta)
+ : d_ta(ta)
+{}
+
/* If you are not an equality engine, don't use this! */
-void RaiseConflict::blackBoxConflict(Node n){
- d_ta.blackBoxConflict(n);
+void RaiseEqualityEngineConflict::raiseEEConflict(Node n) const{
+ d_ta.raiseBlackBoxConflict(n);
}
class RaiseConflict {
private:
TheoryArithPrivate& d_ta;
- ConstraintCPVec& d_construction;
public:
- RaiseConflict(TheoryArithPrivate& ta, ConstraintCPVec& d_construction);
+ RaiseConflict(TheoryArithPrivate& ta);
+
+ /** Calls d_ta.raiseConflict(c) */
+ void raiseConflict(ConstraintCP c) const;
+};
+
+class FarkasConflictBuilder {
+private:
+ RationalVector d_farkas;
+ ConstraintCPVec d_constraints;
+ ConstraintCP d_consequent;
+ bool d_consequentSet;
+public:
+
+ /**
+ * Constructs a new FarkasConflictBuilder.
+ */
+ FarkasConflictBuilder();
+
+ /**
+ * Adds an antecedent constraint to the conflict under construction
+ * with the farkas coefficient fc * mult.
+ *
+ * The value mult is either 1 or -1.
+ */
+ void addConstraint(ConstraintCP c, const Rational& fc, const Rational& mult);
+
+ /**
+ * Adds an antecedent constraint to the conflict under construction
+ * with the farkas coefficient fc.
+ */
+ void addConstraint(ConstraintCP c, const Rational& fc);
+
+ /**
+ * Makes the last constraint added the consequent.
+ * Can be done exactly once per reset().
+ */
+ void makeLastConsequent();
+
+ /**
+   * Turns the antecedents into a proof of the negation of one of the
+ * antecedents.
+ *
+ * The buffer is no longer underConstruction afterwards.
+ *
+ * precondition:
+ * - At least two constraints have been asserted.
+ * - makeLastConsequent() has been called.
+ *
+ * postcondition: The returned constraint is in conflict.
+ */
+ ConstraintCP commitConflict();
+
+  /** Returns true if at least one constraint has been added since the last reset. */
+ bool underConstruction() const;
+
+ /** Returns true if the consequent has been set since the last reset. */
+ bool consequentIsSet() const;
+
+ /** Resets the state of the buffer. */
+ void reset();
+};
- /* Adds a constraint to the constraint under construction. */
- void addConstraint(ConstraintCP c);
- /* Turns the vector under construction into a conflict */
- void commitConflict();
- void sendConflict(const ConstraintCPVec& vec);
+class RaiseEqualityEngineConflict {
+private:
+ TheoryArithPrivate& d_ta;
+
+public:
+ RaiseEqualityEngineConflict(TheoryArithPrivate& ta);
/* If you are not an equality engine, don't use this! */
- void blackBoxConflict(Node n);
+ void raiseEEConflict(Node n) const;
};
class BoundCountingLookup {
namespace theory {
namespace arith {
-ArithCongruenceManager::ArithCongruenceManager(context::Context* c, ConstraintDatabase& cd, SetupLiteralCallBack setup, const ArithVariables& avars, RaiseConflict raiseConflict)
+ArithCongruenceManager::ArithCongruenceManager(context::Context* c, ConstraintDatabase& cd, SetupLiteralCallBack setup, const ArithVariables& avars, RaiseEqualityEngineConflict raiseConflict)
: d_inConflict(c),
d_raiseConflict(raiseConflict),
d_notify(*this),
Assert(!inConflict());
Debug("arith::conflict") << "difference manager conflict " << conflict << std::endl;
d_inConflict.raise();
- d_raiseConflict.blackBoxConflict(conflict);
+ d_raiseConflict.raiseEEConflict(conflict);
}
bool ArithCongruenceManager::inConflict() const{
return d_inConflict.isRaised();
++(d_statistics.d_watchedVariableIsZero);
ArithVar s = lb->getVariable();
- Node reason = Constraint_::externalExplainByAssertions(lb,ub);
+ Node reason = Constraint::externalExplainByAssertions(lb,ub);
d_keepAlive.push_back(reason);
assertionToEqualityEngine(true, s, reason);
<< ub << std::endl;
ArithVar x = lb->getVariable();
- Node reason = Constraint_::externalExplainByAssertions(lb,ub);
+ Node reason = Constraint::externalExplainByAssertions(lb,ub);
Node xAsNode = d_avariables.asNode(x);
Node asRational = mkRationalNode(lb->getValue().getNoninfinitesimalPart());
class ArithCongruenceManager {
private:
context::CDRaised d_inConflict;
- RaiseConflict d_raiseConflict;
+ RaiseEqualityEngineConflict d_raiseConflict;
/**
* The set of ArithVars equivalent to a pair of terms.
public:
- ArithCongruenceManager(context::Context* satContext, ConstraintDatabase&, SetupLiteralCallBack, const ArithVariables&, RaiseConflict raiseConflict);
+ ArithCongruenceManager(context::Context* satContext, ConstraintDatabase&, SetupLiteralCallBack, const ArithVariables&, RaiseEqualityEngineConflict raiseConflict);
Node explain(TNode literal);
void explain(TNode lit, NodeBuilder<>& out);
#include "theory/arith/arith_utilities.h"
#include "theory/arith/normal_form.h"
+#include "proof/proof.h"
+
#include <ostream>
#include <algorithm>
}
}
-Constraint_::Constraint_(ArithVar x, ConstraintType t, const DeltaRational& v)
+Constraint::Constraint(ArithVar x, ConstraintType t, const DeltaRational& v)
: d_variable(x),
d_type(t),
d_value(v),
d_canBePropagated(false),
d_assertionOrder(AssertionOrderSentinel),
d_witness(TNode::null()),
- d_proof(ProofIdSentinel),
+ d_crid(ConstraintRuleIdSentinel),
d_split(false),
d_variablePosition()
{
}
+std::ostream& operator<<(std::ostream& o, const ArithProofType apt){
+ switch(apt){
+ case NoAP: o << "NoAP"; break;
+ case AssumeAP: o << "AssumeAP"; break;
+ case InternalAssumeAP: o << "InternalAssumeAP"; break;
+ case FarkasAP: o << "FarkasAP"; break;
+ case TrichotomyAP: o << "TrichotomyAP"; break;
+ case EqualityEngineAP: o << "EqualityEngineAP"; break;
+ case IntHoleAP: o << "IntHoleAP"; break;
+ default: break;
+ }
+ return o;
+}
+
+std::ostream& operator<<(std::ostream& o, const ConstraintCP c){
+ if(c == NullConstraint){
+ return o << "NullConstraint";
+ }else{
+ return o << *c;
+ }
+}
+
std::ostream& operator<<(std::ostream& o, const ConstraintP c){
if(c == NullConstraint){
return o << "NullConstraint";
}
}
-std::ostream& operator<<(std::ostream& o, const Constraint_& c){
+std::ostream& operator<<(std::ostream& o, const Constraint& c){
o << c.getVariable() << ' ' << c.getType() << ' ' << c.getValue();
if(c.hasLiteral()){
o << "(node " << c.getLiteral() << ')';
return o;
}
-void Constraint_::debugPrint() const {
+void Constraint::debugPrint() const {
Message() << *this << endl;
}
}
}
-bool Constraint_::initialized() const {
+bool Constraint::initialized() const {
return d_database != NULL;
}
-void Constraint_::initialize(ConstraintDatabase* db, SortedConstraintMapIterator v, ConstraintP negation){
+const ConstraintDatabase& Constraint::getDatabase() const{
+ Assert(initialized());
+ return *d_database;
+}
+
+void Constraint::initialize(ConstraintDatabase* db, SortedConstraintMapIterator v, ConstraintP negation){
Assert(!initialized());
d_database = db;
d_variablePosition = v;
d_negation = negation;
}
-Constraint_::~Constraint_() {
- Assert(safeToGarbageCollect());
+Constraint::~Constraint() {
+ // Call this instead of safeToGarbageCollect()
+ Assert(!contextDependentDataIsSet());
if(initialized()){
ValueCollection& vc = d_variablePosition->second;
}
}
-const ValueCollection& Constraint_::getValueCollection() const{
+const ConstraintRule& Constraint::getConstraintRule() const {
+ Assert(hasProof());
+ return d_database->d_watches->d_constraintProofs[d_crid];
+}
+
+const ValueCollection& Constraint::getValueCollection() const{
return d_variablePosition->second;
}
-ConstraintP Constraint_::getCeiling() {
+
+ConstraintP Constraint::getCeiling() {
Debug("getCeiling") << "Constraint_::getCeiling on " << *this << endl;
Assert(getValue().getInfinitesimalPart().sgn() > 0);
return d_database->getConstraint(getVariable(), getType(), ceiling);
}
-ConstraintP Constraint_::getFloor() {
+ConstraintP Constraint::getFloor() {
Assert(getValue().getInfinitesimalPart().sgn() < 0);
DeltaRational floor(Rational(getValue().floor()));
return d_database->getConstraint(getVariable(), getType(), floor);
}
-void Constraint_::setCanBePropagated() {
+void Constraint::setCanBePropagated() {
Assert(!canBePropagated());
d_database->pushCanBePropagatedWatch(this);
}
-void Constraint_::setAssertedToTheTheoryWithNegationTrue(TNode witness) {
+void Constraint::setAssertedToTheTheory(TNode witness, bool nowInConflict) {
Assert(hasLiteral());
Assert(!assertedToTheTheory());
- Assert(d_negation->hasProof());
+ Assert(negationHasProof() == nowInConflict);
d_database->pushAssertionOrderWatch(this, witness);
-}
-void Constraint_::setAssertedToTheTheory(TNode witness) {
- Assert(hasLiteral());
- Assert(!assertedToTheTheory());
- Assert(!d_negation->assertedToTheTheory());
- d_database->pushAssertionOrderWatch(this, witness);
+ if(Debug.isOn("constraint::conflictCommit") && nowInConflict ){
+ Debug("constraint::conflictCommit") << "inConflict@setAssertedToTheTheory";
+ Debug("constraint::conflictCommit") << "\t" << this << std::endl;
+ Debug("constraint::conflictCommit") << "\t" << getNegation() << std::endl;
+ Debug("constraint::conflictCommit") << "\t" << getNegation()->externalExplainByAssertions() << std::endl;
+
+ }
}
-bool Constraint_::satisfiedBy(const DeltaRational& dr) const {
+bool Constraint::satisfiedBy(const DeltaRational& dr) const {
switch(getType()){
case LowerBound:
return getValue() <= dr;
Unreachable();
}
-bool Constraint_::isInternalDecision() const {
- return d_proof == d_database->d_internalDecisionProof;
+bool Constraint::isInternalAssumption() const {
+ return getProofType() == InternalAssumeAP;
+}
+
+bool Constraint::isAssumption() const {
+ return getProofType() == AssumeAP;
+}
+
+bool Constraint::hasEqualityEngineProof() const {
+ return getProofType() == EqualityEngineAP;
}
-bool Constraint_::isSelfExplaining() const {
- return d_proof == d_database->d_selfExplainingProof;
+bool Constraint::hasFarkasProof() const {
+ return getProofType() == FarkasAP;
}
-bool Constraint_::hasEqualityEngineProof() const {
- return d_proof == d_database->d_equalityEngineProof;
+bool Constraint::hasIntHoleProof() const {
+ return getProofType() == IntHoleAP;
}
-bool Constraint_::sanityChecking(Node n) const {
+bool Constraint::hasTrichotomyProof() const {
+ return getProofType() == TrichotomyAP;
+}
+
+bool Constraint::sanityChecking(Node n) const {
Comparison cmp = Comparison::parseNormalForm(n);
Kind k = cmp.comparisonKind();
Polynomial pleft = cmp.normalizedVariablePart();
const ArithVariables& avariables = d_database->getArithVariables();
- Debug("nf::tmp") << cmp.getNode() << endl;
- Debug("nf::tmp") << k << endl;
- Debug("nf::tmp") << pleft.getNode() << endl;
- Debug("nf::tmp") << left << endl;
- Debug("nf::tmp") << right << endl;
- Debug("nf::tmp") << getValue() << endl;
- Debug("nf::tmp") << avariables.hasArithVar(left) << endl;
- Debug("nf::tmp") << avariables.asArithVar(left) << endl;
- Debug("nf::tmp") << getVariable() << endl;
+ Debug("Constraint::sanityChecking") << cmp.getNode() << endl;
+ Debug("Constraint::sanityChecking") << k << endl;
+ Debug("Constraint::sanityChecking") << pleft.getNode() << endl;
+ Debug("Constraint::sanityChecking") << left << endl;
+ Debug("Constraint::sanityChecking") << right << endl;
+ Debug("Constraint::sanityChecking") << getValue() << endl;
+ Debug("Constraint::sanityChecking") << avariables.hasArithVar(left) << endl;
+ Debug("Constraint::sanityChecking") << avariables.asArithVar(left) << endl;
+ Debug("Constraint::sanityChecking") << getVariable() << endl;
if(avariables.hasArithVar(left) &&
}
}
-ConstraintP Constraint_::makeNegation(ArithVar v, ConstraintType t, const DeltaRational& r){
+void ConstraintRule::debugPrint() const {
+ print(std::cerr);
+}
+
+ConstraintCP ConstraintDatabase::getAntecedent (AntecedentId p) const {
+ Assert(p < d_antecedents.size());
+ return d_antecedents[p];
+}
+
+
+void ConstraintRule::print(std::ostream& out) const {
+
+ RationalVectorCP coeffs = NULLPROOF(d_farkasCoefficients);
+ out << "{ConstraintRule, ";
+ out << d_constraint << std::endl;
+ out << "d_proofType= " << d_proofType << ", " << std::endl;
+ out << "d_antecedentEnd= "<< d_antecedentEnd << std::endl;
+
+ if(d_constraint != NullConstraint){
+ const ConstraintDatabase& database = d_constraint->getDatabase();
+
+ size_t coeffIterator = (coeffs != RationalVectorCPSentinel) ? coeffs->size()-1 : 0;
+ AntecedentId p = d_antecedentEnd;
+ // must have at least one antecedent
+ ConstraintCP antecedent = database.getAntecedent(p);
+ while(antecedent != NullConstraint){
+ if(coeffs != RationalVectorCPSentinel){
+ out << coeffs->at(coeffIterator);
+ } else {
+ out << "_";
+ }
+ out << " * (" << *antecedent << ")" << std::endl;
+
+ Assert((coeffs == RationalVectorCPSentinel) || coeffIterator > 0);
+ --p;
+ coeffIterator = (coeffs != RationalVectorCPSentinel) ? coeffIterator-1 : 0;
+ antecedent = database.getAntecedent(p);
+ }
+ if(coeffs != RationalVectorCPSentinel){
+ out << coeffs->front();
+ } else {
+ out << "_";
+ }
+ out << " * (" << *(d_constraint->getNegation()) << ")";
+ out << " [not d_constraint] " << endl;
+ }
+ out << "}";
+}
+
+bool Constraint::wellFormedFarkasProof() const {
+ Assert(hasProof());
+
+ const ConstraintRule& cr = getConstraintRule();
+ if(cr.d_constraint != this){ return false; }
+ if(cr.d_proofType != FarkasAP){ return false; }
+
+ AntecedentId p = cr.d_antecedentEnd;
+
+ // must have at least one antecedent
+ ConstraintCP antecedent = d_database->d_antecedents[p];
+ if(antecedent == NullConstraint) { return false; }
+
+#ifdef CVC4_PROOF
+ if(!PROOF_ON()){ return cr.d_farkasCoefficients == RationalVectorCPSentinel; }
+ Assert(PROOF_ON());
+
+ if(cr.d_farkasCoefficients == RationalVectorCPSentinel){ return false; }
+ if(cr.d_farkasCoefficients->size() < 2){ return false; }
+
+ const ArithVariables& vars = d_database->getArithVariables();
+
+ DeltaRational rhs(0);
+ Node lhs = Polynomial::mkZero().getNode();
+
+ RationalVector::const_iterator coeffIterator = cr.d_farkasCoefficients->end()-1;
+ RationalVector::const_iterator coeffBegin = cr.d_farkasCoefficients->begin();
+
+ while(antecedent != NullConstraint){
+ Assert(lhs.isNull() || Polynomial::isMember(lhs));
+
+ const Rational& coeff = *coeffIterator;
+ int coeffSgn = coeff.sgn();
+
+ rhs += antecedent->getValue() * coeff;
+
+ ArithVar antVar = antecedent->getVariable();
+ if(!lhs.isNull() && vars.hasNode(antVar)){
+ Node antAsNode = vars.asNode(antVar);
+ if(Polynomial::isMember(antAsNode)){
+ Polynomial lhsPoly = Polynomial::parsePolynomial(lhs);
+ Polynomial antPoly = Polynomial::parsePolynomial(antAsNode);
+ Polynomial sum = lhsPoly + (antPoly * coeff);
+ lhs = sum.getNode();
+ }else{
+ lhs = Node::null();
+ }
+ } else {
+ lhs = Node::null();
+ }
+ Debug("constraints::wffp") << "running sum: " << lhs << " <= " << rhs << endl;
+
+ switch( antecedent->getType() ){
+ case LowerBound:
+ // fc[l] < 0, therefore return false if coeffSgn >= 0
+ if(coeffSgn >= 0){ return false; }
+ break;
+ case UpperBound:
+ // fc[u] > 0, therefore return false if coeffSgn <= 0
+ if(coeffSgn <= 0){ return false; }
+ break;
+ case Equality:
+ if(coeffSgn == 0) { return false; }
+ break;
+ case Disequality:
+ default:
+ return false;
+ }
+
+ if(coeffIterator == coeffBegin){ return false; }
+ --coeffIterator;
+ --p;
+ antecedent = d_database->d_antecedents[p];
+ }
+ if(coeffIterator != coeffBegin){ return false; }
+
+ const Rational& firstCoeff = (*coeffBegin);
+ int firstCoeffSgn = firstCoeff.sgn();
+ rhs += (getNegation()->getValue()) * firstCoeff;
+ if(!lhs.isNull() && vars.hasNode(getVariable())){
+ Node firstAsNode = vars.asNode(getVariable());
+ if(Polynomial::isMember(firstAsNode)){
+ Polynomial lhsPoly = Polynomial::parsePolynomial(lhs);
+ Polynomial firstPoly = Polynomial::parsePolynomial(firstAsNode);
+ Polynomial sum = lhsPoly + (firstPoly * firstCoeff);
+ lhs = sum.getNode();
+ }else{
+ lhs = Node::null();
+ }
+ }else{
+ lhs = Node::null();
+ }
+
+ switch( getNegation()->getType() ){
+ case LowerBound:
+ // fc[l] < 0, therefore return false if coeffSgn >= 0
+ if(firstCoeffSgn >= 0){ return false; }
+ break;
+ case UpperBound:
+ // fc[u] > 0, therefore return false if coeffSgn <= 0
+ if(firstCoeffSgn <= 0){ return false; }
+ break;
+ case Equality:
+ if(firstCoeffSgn == 0) { return false; }
+ break;
+ case Disequality:
+ default:
+ return false;
+ }
+ Debug("constraints::wffp") << "final sum: " << lhs << " <= " << rhs << endl;
+ // 0 = lhs <= rhs < 0
+ return
+ (lhs.isNull() || Constant::isMember(lhs) && Constant(lhs).isZero() ) &&
+ rhs.sgn() < 0;
+
+#else
+ return true;
+#endif
+}
+
+ConstraintP Constraint::makeNegation(ArithVar v, ConstraintType t, const DeltaRational& r){
switch(t){
case LowerBound:
{
Assert(r.getInfinitesimalPart() == 1);
// make (not (v > r)), which is (v <= r)
DeltaRational dropInf(r.getNoninfinitesimalPart(), 0);
- return new Constraint_(v, UpperBound, dropInf);
+ return new Constraint(v, UpperBound, dropInf);
}else{
Assert(r.infinitesimalSgn() == 0);
// make (not (v >= r)), which is (v < r)
DeltaRational addInf(r.getNoninfinitesimalPart(), -1);
- return new Constraint_(v, UpperBound, addInf);
+ return new Constraint(v, UpperBound, addInf);
}
}
case UpperBound:
Assert(r.getInfinitesimalPart() == -1);
// make (not (v < r)), which is (v >= r)
DeltaRational dropInf(r.getNoninfinitesimalPart(), 0);
- return new Constraint_(v, LowerBound, dropInf);
+ return new Constraint(v, LowerBound, dropInf);
}else{
Assert(r.infinitesimalSgn() == 0);
// make (not (v <= r)), which is (v > r)
DeltaRational addInf(r.getNoninfinitesimalPart(), 1);
- return new Constraint_(v, LowerBound, addInf);
+ return new Constraint(v, LowerBound, addInf);
}
}
case Equality:
- return new Constraint_(v, Disequality, r);
+ return new Constraint(v, Disequality, r);
case Disequality:
- return new Constraint_(v, Equality, r);
+ return new Constraint(v, Equality, r);
default:
Unreachable();
return NullConstraint;
}
ConstraintDatabase::ConstraintDatabase(context::Context* satContext, context::Context* userContext, const ArithVariables& avars, ArithCongruenceManager& cm, RaiseConflict raiseConflict)
- : d_varDatabases(),
- d_toPropagate(satContext),
- d_proofs(satContext, false),
- d_watches(new Watches(satContext, userContext)),
- d_avariables(avars),
- d_congruenceManager(cm),
- d_satContext(satContext),
- d_satAllocationLevel(d_satContext->getLevel()),
- d_raiseConflict(raiseConflict)
+ : d_varDatabases()
+ , d_toPropagate(satContext)
+ , d_antecedents(satContext, false)
+ , d_watches(new Watches(satContext, userContext))
+ , d_avariables(avars)
+ , d_congruenceManager(cm)
+ , d_satContext(satContext)
+ , d_raiseConflict(raiseConflict)
+ , d_one(1)
+ , d_negOne(-1)
{
- d_selfExplainingProof = d_proofs.size();
- d_proofs.push_back(NullConstraint);
-
- d_equalityEngineProof = d_proofs.size();
- d_proofs.push_back(NullConstraint);
-
- d_internalDecisionProof = d_proofs.size();
- d_proofs.push_back(NullConstraint);
+
}
SortedConstraintMap& ConstraintDatabase::getVariableSCM(ArithVar v) const{
d_watches->d_assertionOrderWatches.push_back(c);
}
-void ConstraintDatabase::pushProofWatch(ConstraintP c, ProofId pid){
- Assert(c->d_proof == ProofIdSentinel);
- c->d_proof = pid;
- d_watches->d_proofWatches.push_back(c);
+
+void ConstraintDatabase::pushConstraintRule(const ConstraintRule& crp){
+ ConstraintP c = crp.d_constraint;
+ Assert(c->d_crid == ConstraintRuleIdSentinel);
+ Assert(!c->hasProof());
+ c->d_crid = d_watches->d_constraintProofs.size();
+ d_watches->d_constraintProofs.push_back(crp);
}
ConstraintP ConstraintDatabase::getConstraint(ArithVar v, ConstraintType t, const DeltaRational& r){
if(vc.hasConstraintOfType(t)){
return vc.getConstraintOfType(t);
}else{
- ConstraintP c = new Constraint_(v, t, r);
- ConstraintP negC = Constraint_::makeNegation(v, t, r);
+ ConstraintP c = new Constraint(v, t, r);
+ ConstraintP negC = Constraint::makeNegation(v, t, r);
SortedConstraintMapIterator negPos;
if(t == Equality || t == Disequality){
}
ConstraintDatabase::~ConstraintDatabase(){
- Assert(d_satAllocationLevel <= d_satContext->getLevel());
-
delete d_watches;
std::vector<ConstraintP> constraintList;
d_reclaimable.add(v);
}
-bool Constraint_::safeToGarbageCollect() const{
- return !isSplit()
- && !canBePropagated()
- && !hasProof()
- && !assertedToTheTheory();
+bool Constraint::safeToGarbageCollect() const{
+ // Do not call during destructor as getNegation() may be Null by this point
+ Assert(getNegation() != NullConstraint);
+ return !contextDependentDataIsSet() && ! getNegation()->contextDependentDataIsSet();
}
-Node Constraint_::split(){
+bool Constraint::contextDependentDataIsSet() const{
+ return hasProof() || isSplit() || canBePropagated() || assertedToTheTheory();
+}
+
+Node Constraint::split(){
Assert(isEquality() || isDisequality());
bool isEq = isEquality();
return lookup(literal) != NullConstraint;
}
-// ConstraintP ConstraintDatabase::addLiteral(TNode literal){
-// Assert(!hasLiteral(literal));
-// bool isNot = (literal.getKind() == NOT);
-// TNode atom = isNot ? literal[0] : literal;
-
-// ConstraintP atomC = addAtom(atom);
-
-// return isNot ? atomC->d_negation : atomC;
-// }
-
-// ConstraintP ConstraintDatabase::allocateConstraintForComparison(ArithVar v, const Comparison cmp){
-// Debug("arith::constraint") << "allocateConstraintForLiteral(" << v << ", "<< cmp <<")" << endl;
-// Kind kind = cmp.comparisonKind();
-// ConstraintType type = constraintTypeOfLiteral(kind);
-
-// DeltaRational dr = cmp.getDeltaRational();
-// return new Constraint_(v, type, dr);
-// }
-
ConstraintP ConstraintDatabase::addLiteral(TNode literal){
Assert(!hasLiteral(literal));
bool isNot = (literal.getKind() == NOT);
ConstraintType posType = constraintTypeOfComparison(posCmp);
Polynomial nvp = posCmp.normalizedVariablePart();
- Debug("nf::tmp") << "here " << nvp.getNode() << " " << endl;
ArithVar v = d_avariables.asArithVar(nvp.getNode());
DeltaRational posDR = posCmp.normalizedDeltaRational();
- ConstraintP posC = new Constraint_(v, posType, posDR);
+ ConstraintP posC = new Constraint(v, posType, posDR);
Debug("arith::constraint") << "addliteral( literal ->" << literal << ")" << endl;
Debug("arith::constraint") << "addliteral( posC ->" << posC << ")" << endl;
ConstraintType negType = constraintTypeOfComparison(negCmp);
DeltaRational negDR = negCmp.normalizedDeltaRational();
- ConstraintP negC = new Constraint_(v, negType, negDR);
+ ConstraintP negC = new Constraint(v, negType, negDR);
SortedConstraintMapIterator negI;
}
}
-// ConstraintType constraintTypeOfLiteral(Kind k){
-// switch(k){
-// case LT:
-// case LEQ:
-// return UpperBound;
-// case GT:
-// case GEQ:
-// return LowerBound;
-// case EQUAL:
-// return Equality;
-// case DISTINCT:
-// return Disequality;
-// default:
-// Unreachable();
-// }
-// }
ConstraintP ConstraintDatabase::lookup(TNode literal) const{
NodetoConstraintMap::const_iterator iter = d_nodetoConstraintMap.find(literal);
}
}
-void Constraint_::selfExplainingWithNegationTrue(){
+void Constraint::setAssumption(bool nowInConflict){
Assert(!hasProof());
- Assert(getNegation()->hasProof());
+ Assert(negationHasProof() == nowInConflict);
Assert(hasLiteral());
Assert(assertedToTheTheory());
- d_database->pushProofWatch(this, d_database->d_selfExplainingProof);
+
+ d_database->pushConstraintRule(ConstraintRule(this, AssumeAP));
+
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict@setAssumption " << this << std::endl;
+ }
}
-void Constraint_::selfExplaining(){
- markAsTrue();
+void Constraint::tryToPropagate(){
+ Assert(hasProof());
+ Assert(!isAssumption());
+ Assert(!isInternalAssumption());
+
+ if(canBePropagated() && !assertedToTheTheory() && !isAssumption() && !isInternalAssumption()){
+ propagate();
+ }
}
-void Constraint_::propagate(){
+void Constraint::propagate(){
Assert(hasProof());
Assert(canBePropagated());
Assert(!assertedToTheTheory());
- Assert(!isSelfExplaining());
+ Assert(!isAssumption());
+ Assert(!isInternalAssumption());
d_database->d_toPropagate.push(this);
}
-void Constraint_::propagate(ConstraintCP a){
- Assert(!hasProof());
- Assert(canBePropagated());
-
- markAsTrue(a);
- propagate();
-}
-void Constraint_::propagate(ConstraintCP a, ConstraintCP b){
+/*
+ * Example:
+ * x <= a and a < b
+ * |= x <= b
+ * ---
+ * 1*(x <= a) + (-1)*(x > b) => (0 <= a-b)
+ */
+void Constraint::impliedByUnate(ConstraintCP imp, bool nowInConflict){
Assert(!hasProof());
- Assert(canBePropagated());
+ Assert(imp->hasProof());
+ Assert(negationHasProof() == nowInConflict);
- markAsTrue(a, b);
- propagate();
-}
-void Constraint_::propagate(const ConstraintCPVec& b){
- Assert(!hasProof());
- Assert(canBePropagated());
+ d_database->d_antecedents.push_back(NullConstraint);
+ d_database->d_antecedents.push_back(imp);
- markAsTrue(b);
- propagate();
-}
+ AntecedentId antecedentEnd = d_database->d_antecedents.size() - 1;
-void Constraint_::impliedBy(ConstraintCP a){
- Assert(truthIsUnknown());
+ RationalVectorP coeffs;
+ if(PROOF_ON()){
+ std::pair<int, int> sgns = unateFarkasSigns(getNegation(), imp);
- markAsTrue(a);
- if(canBePropagated()){
- propagate();
+ Rational first(sgns.first);
+ Rational second(sgns.second);
+
+ coeffs = new RationalVector();
+ coeffs->push_back(first);
+ coeffs->push_back(second);
+ } else {
+ coeffs = RationalVectorPSentinel;
}
-}
-void Constraint_::impliedBy(ConstraintCP a, ConstraintCP b){
- Assert(truthIsUnknown());
+  // no need to delete coeffs; the memory is owned by the ConstraintRule
+ d_database->pushConstraintRule(ConstraintRule(this, FarkasAP, antecedentEnd, coeffs));
- markAsTrue(a, b);
- if(canBePropagated()){
- propagate();
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict@impliedByUnate " << this << std::endl;
+ }
+
+ if(Debug.isOn("constraints::wffp") && !wellFormedFarkasProof()){
+ getConstraintRule().print(Debug("constraints::wffp"));
}
+ Assert(wellFormedFarkasProof());
}
-void Constraint_::impliedBy(const ConstraintCPVec& b){
- Assert(truthIsUnknown());
+void Constraint::impliedByTrichotomy(ConstraintCP a, ConstraintCP b, bool nowInConflict){
+ Assert(!hasProof());
+ Assert(negationHasProof() == nowInConflict);
+ Assert(a->hasProof());
+ Assert(b->hasProof());
- markAsTrue(b);
- if(canBePropagated()){
- propagate();
- }
-}
+ d_database->d_antecedents.push_back(NullConstraint);
+ d_database->d_antecedents.push_back(a);
+ d_database->d_antecedents.push_back(b);
-void Constraint_::setInternalDecision(){
- Assert(truthIsUnknown());
- Assert(!assertedToTheTheory());
+ AntecedentId antecedentEnd = d_database->d_antecedents.size() - 1;
+ d_database->pushConstraintRule(ConstraintRule(this, TrichotomyAP, antecedentEnd));
- d_database->pushProofWatch(this, d_database->d_internalDecisionProof);
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict@impliedByTrichotomy " << this << std::endl;
+ }
}
-void Constraint_::setEqualityEngineProof(){
- Assert(truthIsUnknown());
- Assert(hasLiteral());
- d_database->pushProofWatch(this, d_database->d_equalityEngineProof);
-}
-void Constraint_::markAsTrue(){
- Assert(truthIsUnknown());
- Assert(hasLiteral());
- Assert(assertedToTheTheory());
- d_database->pushProofWatch(this, d_database->d_selfExplainingProof);
+bool Constraint::allHaveProof(const ConstraintCPVec& b){
+ for(ConstraintCPVec::const_iterator i=b.begin(), i_end=b.end(); i != i_end; ++i){
+ ConstraintCP cp = *i;
+ if(! (cp->hasProof())){ return false; }
+ }
+ return true;
}
-void Constraint_::markAsTrue(ConstraintCP imp){
- Assert(truthIsUnknown());
- Assert(imp->hasProof());
+void Constraint::impliedByIntHole(ConstraintCP a, bool nowInConflict){
+ Assert(!hasProof());
+ Assert(negationHasProof() == nowInConflict);
+ Assert(a->hasProof());
+
+ d_database->d_antecedents.push_back(NullConstraint);
+ d_database->d_antecedents.push_back(a);
+ AntecedentId antecedentEnd = d_database->d_antecedents.size() - 1;
+ d_database->pushConstraintRule(ConstraintRule(this, IntHoleAP, antecedentEnd));
- d_database->d_proofs.push_back(NullConstraint);
- d_database->d_proofs.push_back(imp);
- ProofId proof = d_database->d_proofs.size() - 1;
- d_database->pushProofWatch(this, proof);
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict impliedByIntHole" << this << std::endl;
+ }
}
-void Constraint_::markAsTrue(ConstraintCP impA, ConstraintCP impB){
- Assert(truthIsUnknown());
- Assert(impA->hasProof());
- Assert(impB->hasProof());
+void Constraint::impliedByIntHole(const ConstraintCPVec& b, bool nowInConflict){
+ Assert(!hasProof());
+ Assert(negationHasProof() == nowInConflict);
+ Assert(allHaveProof(b));
+
+ CDConstraintList& antecedents = d_database->d_antecedents;
+ antecedents.push_back(NullConstraint);
+ for(ConstraintCPVec::const_iterator i=b.begin(), i_end=b.end(); i != i_end; ++i){
+ antecedents.push_back(*i);
+ }
+ AntecedentId antecedentEnd = antecedents.size() - 1;
- d_database->d_proofs.push_back(NullConstraint);
- d_database->d_proofs.push_back(impA);
- d_database->d_proofs.push_back(impB);
- ProofId proof = d_database->d_proofs.size() - 1;
+ d_database->pushConstraintRule(ConstraintRule(this, IntHoleAP, antecedentEnd));
- d_database->pushProofWatch(this, proof);
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict@impliedByIntHole[vec] " << this << std::endl;
+ }
}
-void Constraint_::markAsTrue(const ConstraintCPVec& a){
- Assert(truthIsUnknown());
+/*
+ * If proofs are off, coeffs == RationalVectorCPSentinel.
+ * If proofs are on,
+ *   coeffs != RationalVectorCPSentinel,
+ * coeffs->size() = a.size() + 1,
+ * for i in [0,a.size) : coeff[i] corresponds to a[i], and
+ * coeff.back() corresponds to the current constraint.
+ */
+void Constraint::impliedByFarkas(const ConstraintCPVec& a, RationalVectorCP coeffs, bool nowInConflict){
+ Assert(!hasProof());
+ Assert(negationHasProof() == nowInConflict);
+ Assert(allHaveProof(a));
+
+ Assert( PROOF_ON() == (coeffs != RationalVectorCPSentinel) );
+ // !PROOF_ON() => coeffs == RationalVectorCPSentinel
+ // PROOF_ON() => coeffs->size() == a.size() + 1
+ Assert(!PROOF_ON() || coeffs->size() == a.size() + 1);
Assert(a.size() >= 1);
- d_database->d_proofs.push_back(NullConstraint);
+
+ d_database->d_antecedents.push_back(NullConstraint);
for(ConstraintCPVec::const_iterator i = a.begin(), end = a.end(); i != end; ++i){
ConstraintCP c_i = *i;
Assert(c_i->hasProof());
- //Assert(!c_i->isPseudoConstraint());
- d_database->d_proofs.push_back(c_i);
+ d_database->d_antecedents.push_back(c_i);
+ }
+ AntecedentId antecedentEnd = d_database->d_antecedents.size() - 1;
+
+ RationalVectorCP coeffsCopy;
+ if(PROOF_ON()){
+ Assert(coeffs != RationalVectorCPSentinel);
+ coeffsCopy = new RationalVector(*coeffs);
+ } else {
+ coeffsCopy = RationalVectorCPSentinel;
+ }
+ d_database->pushConstraintRule(ConstraintRule(this, FarkasAP, antecedentEnd, coeffsCopy));
+
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict@impliedByFarkas " << this << std::endl;
+ }
+ if(Debug.isOn("constraints::wffp") && !wellFormedFarkasProof()){
+ getConstraintRule().print(Debug("constraints::wffp"));
+ }
+ Assert(wellFormedFarkasProof());
+}
+
+
+void Constraint::setInternalAssumption(bool nowInConflict){
+ Assert(!hasProof());
+ Assert(negationHasProof() == nowInConflict);
+ Assert(!assertedToTheTheory());
+
+ d_database->pushConstraintRule(ConstraintRule(this, InternalAssumeAP));
+
+ Assert(inConflict() == nowInConflict);
+ if(Debug.isOn("constraint::conflictCommit") && inConflict()){
+ Debug("constraint::conflictCommit") << "inConflict@setInternalAssumption " << this << std::endl;
}
+}
- ProofId proof = d_database->d_proofs.size() - 1;
- d_database->pushProofWatch(this, proof);
+void Constraint::setEqualityEngineProof(){
+ Assert(truthIsUnknown());
+ Assert(hasLiteral());
+ d_database->pushConstraintRule(ConstraintRule(this, EqualityEngineAP));
}
-SortedConstraintMap& Constraint_::constraintSet() const{
+
+SortedConstraintMap& Constraint::constraintSet() const{
Assert(d_database->variableDatabaseIsSetup(d_variable));
return (d_database->d_varDatabases[d_variable])->d_constraints;
}
-bool Constraint_::proofIsEmpty() const{
+bool Constraint::antecentListIsEmpty() const{
Assert(hasProof());
- bool result = d_database->d_proofs[d_proof] == NullConstraint;
- //Assert((!result) || isSelfExplaining() || hasEqualityEngineProof() || isPseudoConstraint());
- Assert((!result) || isSelfExplaining() || hasEqualityEngineProof());
- return result;
+ return d_database->d_antecedents[getEndAntecedent()] == NullConstraint;
}
-Node Constraint_::externalImplication(const ConstraintCPVec& b) const{
+bool Constraint::antecedentListLengthIsOne() const {
+ Assert(hasProof());
+ return !antecentListIsEmpty() &&
+ d_database->d_antecedents[getEndAntecedent()-1] == NullConstraint;
+}
+
+Node Constraint::externalImplication(const ConstraintCPVec& b) const{
Assert(hasLiteral());
Node antecedent = externalExplainByAssertions(b);
Node implied = getLiteral();
}
-Node Constraint_::externalExplainByAssertions(const ConstraintCPVec& b){
+Node Constraint::externalExplainByAssertions(const ConstraintCPVec& b){
return externalExplain(b, AssertionOrderSentinel);
}
+Node Constraint::externalExplainConflict() const{
+ Assert(inConflict());
+ NodeBuilder<> nb(kind::AND);
+ externalExplainByAssertions(nb);
+ getNegation()->externalExplainByAssertions(nb);
+
+ return safeConstructNary(nb);
+}
+
struct ConstraintCPHash {
/* Todo replace with an id */
size_t operator()(ConstraintCP c) const{
}
};
-void Constraint_::assertionFringe(ConstraintCPVec& v){
+void Constraint::assertionFringe(ConstraintCPVec& v){
hash_set<ConstraintCP, ConstraintCPHash> visited;
size_t writePos = 0;
if(!v.empty()){
const ConstraintDatabase* db = v.back()->d_database;
- const CDConstraintList& proofs = db->d_proofs;
+ const CDConstraintList& antecedents = db->d_antecedents;
for(size_t i = 0; i < v.size(); ++i){
ConstraintCP vi = v[i];
if(visited.find(vi) == visited.end()){
v[writePos] = vi;
writePos++;
}else{
- Assert(!vi->isSelfExplaining());
- ProofId p = vi->d_proof;
- ConstraintCP antecedent = proofs[p];
+ Assert(vi->hasFarkasProof() || vi->hasIntHoleProof() );
+ AntecedentId p = vi->getEndAntecedent();
+
+ ConstraintCP antecedent = antecedents[p];
while(antecedent != NullConstraint){
v.push_back(antecedent);
--p;
- antecedent = proofs[p];
+ antecedent = antecedents[p];
}
}
}
}
}
-void Constraint_::assertionFringe(ConstraintCPVec& o, const ConstraintCPVec& i){
+void Constraint::assertionFringe(ConstraintCPVec& o, const ConstraintCPVec& i){
o.insert(o.end(), i.begin(), i.end());
assertionFringe(o);
}
-Node Constraint_::externalExplain(const ConstraintCPVec& v, AssertionOrder order){
+Node Constraint::externalExplain(const ConstraintCPVec& v, AssertionOrder order){
NodeBuilder<> nb(kind::AND);
ConstraintCPVec::const_iterator i, end;
for(i = v.begin(), end = v.end(); i != end; ++i){
return safeConstructNary(nb);
}
-void Constraint_::externalExplain(NodeBuilder<>& nb, AssertionOrder order) const{
+void Constraint::externalExplain(NodeBuilder<>& nb, AssertionOrder order) const{
Assert(hasProof());
- Assert(!isSelfExplaining() || assertedToTheTheory());
- Assert(!isInternalDecision());
+ Assert(!isAssumption() || assertedToTheTheory());
+ Assert(!isInternalAssumption());
if(assertedBefore(order)){
nb << getWitness();
}else if(hasEqualityEngineProof()){
d_database->eeExplain(this, nb);
}else{
- Assert(!isSelfExplaining());
- ProofId p = d_proof;
- ConstraintCP antecedent = d_database->d_proofs[p];
+ Assert(!isAssumption());
+ AntecedentId p = getEndAntecedent();
+ ConstraintCP antecedent = d_database->d_antecedents[p];
- for(; antecedent != NullConstraint; antecedent = d_database->d_proofs[--p] ){
+ while(antecedent != NullConstraint){
antecedent->externalExplain(nb, order);
+ --p;
+ antecedent = d_database->d_antecedents[p];
}
}
}
-Node Constraint_::externalExplain(AssertionOrder order) const{
+Node Constraint::externalExplain(AssertionOrder order) const{
Assert(hasProof());
- Assert(!isSelfExplaining() || assertedBefore(order));
- Assert(!isInternalDecision());
+ Assert(!isAssumption() || assertedBefore(order));
+ Assert(!isInternalAssumption());
if(assertedBefore(order)){
return getWitness();
}else if(hasEqualityEngineProof()){
return d_database->eeExplain(this);
}else{
- Assert(!proofIsEmpty());
+ Assert(hasFarkasProof() || hasIntHoleProof() || hasTrichotomyProof());
+ Assert(!antecentListIsEmpty());
//Force the selection of the layer above if the node is
// assertedToTheTheory()!
- if(d_database->d_proofs[d_proof-1] == NullConstraint){
- ConstraintCP antecedent = d_database->d_proofs[d_proof];
+
+ AntecedentId listEnd = getEndAntecedent();
+ if(antecedentListLengthIsOne()){
+ ConstraintCP antecedent = d_database->d_antecedents[listEnd];
return antecedent->externalExplain(order);
}else{
NodeBuilder<> nb(kind::AND);
- Assert(!isSelfExplaining());
+ Assert(!isAssumption());
- ProofId p = d_proof;
- ConstraintCP antecedent = d_database->d_proofs[p];
+ AntecedentId p = listEnd;
+ ConstraintCP antecedent = d_database->d_antecedents[p];
while(antecedent != NullConstraint){
antecedent->externalExplain(nb, order);
--p;
- antecedent = d_database->d_proofs[p];
+ antecedent = d_database->d_antecedents[p];
}
return nb;
}
}
}
-Node Constraint_::externalExplainByAssertions(ConstraintCP a, ConstraintCP b){
+Node Constraint::externalExplainByAssertions(ConstraintCP a, ConstraintCP b){
NodeBuilder<> nb(kind::AND);
a->externalExplainByAssertions(nb);
b->externalExplainByAssertions(nb);
return nb;
}
-Node Constraint_::externalExplainByAssertions(ConstraintCP a, ConstraintCP b, ConstraintCP c){
+Node Constraint::externalExplainByAssertions(ConstraintCP a, ConstraintCP b, ConstraintCP c){
NodeBuilder<> nb(kind::AND);
a->externalExplainByAssertions(nb);
b->externalExplainByAssertions(nb);
return nb;
}
-ConstraintP Constraint_::getStrictlyWeakerLowerBound(bool hasLiteral, bool asserted) const {
+ConstraintP Constraint::getStrictlyWeakerLowerBound(bool hasLiteral, bool asserted) const {
Assert(initialized());
Assert(!asserted || hasLiteral);
return NullConstraint;
}
-ConstraintP Constraint_::getStrictlyWeakerUpperBound(bool hasLiteral, bool asserted) const {
+ConstraintP Constraint::getStrictlyWeakerUpperBound(bool hasLiteral, bool asserted) const {
SortedConstraintMapConstIterator i = d_variablePosition;
const SortedConstraintMap& scm = constraintSet();
SortedConstraintMapConstIterator i_end = scm.end();
}
}
}
-Node ConstraintDatabase::eeExplain(const Constraint_* const c) const{
+Node ConstraintDatabase::eeExplain(const Constraint* const c) const{
Assert(c->hasLiteral());
return d_congruenceManager.explain(c->getLiteral());
}
-void ConstraintDatabase::eeExplain(const Constraint_* const c, NodeBuilder<>& nb) const{
+void ConstraintDatabase::eeExplain(const Constraint* const c, NodeBuilder<>& nb) const{
Assert(c->hasLiteral());
d_congruenceManager.explain(c->getLiteral(), nb);
}
ConstraintDatabase::Watches::Watches(context::Context* satContext, context::Context* userContext):
- d_proofWatches(satContext),
+ d_constraintProofs(satContext),
d_canBePropagatedWatches(satContext),
d_assertionOrderWatches(satContext),
d_splitWatches(userContext)
{}
-void Constraint_::setLiteral(Node n) {
+void Constraint::setLiteral(Node n) {
Assert(!hasLiteral());
Assert(sanityChecking(n));
d_literal = n;
}
}
-void ConstraintDatabase::raiseUnateConflict(ConstraintP ant, ConstraintP cons){
- Assert(ant->hasProof());
- ConstraintP negCons = cons->getNegation();
- Assert(negCons->hasProof());
-
- Debug("arith::unate::conf") << ant << "implies " << cons << endl;
- Debug("arith::unate::conf") << negCons << " is true." << endl;
-
- d_raiseConflict.addConstraint(ant);
- d_raiseConflict.addConstraint(negCons);
- d_raiseConflict.commitConflict();
+bool ConstraintDatabase::handleUnateProp(ConstraintP ant, ConstraintP cons){
+ if(cons->negationHasProof()){
+ Debug("arith::unate") << "handleUnate: " << ant << " implies " << cons << endl;
+ cons->impliedByUnate(ant, true);
+ d_raiseConflict.raiseConflict(cons);
+ return true;
+ }else if(!cons->isTrue()){
+ ++d_statistics.d_unatePropagateImplications;
+ Debug("arith::unate") << "handleUnate: " << ant << " implies " << cons << endl;
+ cons->impliedByUnate(ant, false);
+ cons->tryToPropagate();
+ return false;
+ } else {
+ return false;
+ }
}
void ConstraintDatabase::unatePropLowerBound(ConstraintP curr, ConstraintP prev){
//These should all be handled by propagating the LowerBounds!
if(vc.hasLowerBound()){
ConstraintP lb = vc.getLowerBound();
- if(lb->negationHasProof()){
- raiseUnateConflict(curr, lb);
- return;
- }else if(!lb->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unatePropLowerBound " << curr << " implies " << lb << endl;
-
- lb->impliedBy(curr);
- }
+ if(handleUnateProp(curr, lb)){ return; }
}
if(vc.hasDisequality()){
ConstraintP dis = vc.getDisequality();
- if(dis->negationHasProof()){
- raiseUnateConflict(curr, dis);
- return;
- }else if(!dis->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unatePropLowerBound " << curr << " implies " << dis << endl;
-
- dis->impliedBy(curr);
- }
+ if(handleUnateProp(curr, dis)){ return; }
}
}
}
//These should all be handled by propagating the UpperBounds!
if(vc.hasUpperBound()){
ConstraintP ub = vc.getUpperBound();
- if(ub->negationHasProof()){
- raiseUnateConflict(curr, ub);
- return;
- }else if(!ub->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unatePropUpperBound " << curr << " implies " << ub << endl;
- ub->impliedBy(curr);
- }
+ if(handleUnateProp(curr, ub)){ return; }
}
if(vc.hasDisequality()){
ConstraintP dis = vc.getDisequality();
- if(dis->negationHasProof()){
- raiseUnateConflict(curr, dis);
- return;
- }else if(!dis->isTrue()){
- Debug("arith::unate") << "unatePropUpperBound " << curr << " implies " << dis << endl;
- ++d_statistics.d_unatePropagateImplications;
-
- dis->impliedBy(curr);
- }
+ if(handleUnateProp(curr, dis)){ return; }
}
}
}
//These should all be handled by propagating the LowerBounds!
if(vc.hasLowerBound()){
ConstraintP lb = vc.getLowerBound();
- if(lb->negationHasProof()){
- raiseUnateConflict(curr, lb);
- return;
- }else if(!lb->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unatePropUpperBound " << curr << " implies " << lb << endl;
- lb->impliedBy(curr);
- }
+ if(handleUnateProp(curr, lb)){ return; }
}
if(vc.hasDisequality()){
ConstraintP dis = vc.getDisequality();
- if(dis->negationHasProof()){
- raiseUnateConflict(curr, dis);
- return;
- }else if(!dis->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unatePropUpperBound " << curr << " implies " << dis << endl;
-
- dis->impliedBy(curr);
- }
+ if(handleUnateProp(curr, dis)){ return; }
}
}
Assert(scm_i == scm_curr);
//These should all be handled by propagating the UpperBounds!
if(vc.hasUpperBound()){
ConstraintP ub = vc.getUpperBound();
- if(ub->negationHasProof()){
- raiseUnateConflict(curr, ub);
- return;
- }else if(!ub->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unateProp " << curr << " implies " << ub << endl;
-
- ub->impliedBy(curr);
- }
+ if(handleUnateProp(curr, ub)){ return; }
}
if(vc.hasDisequality()){
ConstraintP dis = vc.getDisequality();
- if(dis->negationHasProof()){
- raiseUnateConflict(curr, dis);
- return;
- }else if(!dis->isTrue()){
- ++d_statistics.d_unatePropagateImplications;
- Debug("arith::unate") << "unateProp " << curr << " implies " << dis << endl;
- dis->impliedBy(curr);
- }
+ if(handleUnateProp(curr, dis)){ return; }
+ }
+ }
+}
+
+std::pair<int, int> Constraint::unateFarkasSigns(ConstraintCP ca, ConstraintCP cb){
+ ConstraintType a = ca->getType();
+ ConstraintType b = cb->getType();
+
+ Assert(a != Disequality);
+ Assert(b != Disequality);
+
+ int a_sgn = (a == LowerBound) ? -1 : ((a == UpperBound) ? 1 : 0);
+ int b_sgn = (b == LowerBound) ? -1 : ((b == UpperBound) ? 1 : 0);
+
+ if(a_sgn == 0 && b_sgn == 0){
+ Assert(a == Equality);
+ Assert(b == Equality);
+ Assert(ca->getValue() != cb->getValue());
+ if(ca->getValue() < cb->getValue()){
+ a_sgn = 1;
+ b_sgn = -1;
+ }else{
+ a_sgn = -1;
+ b_sgn = 1;
}
+ }else if(a_sgn == 0){
+ Assert(b_sgn != 0);
+ Assert(a == Equality);
+ a_sgn = -b_sgn;
+ }else if(b_sgn == 0){
+ Assert(a_sgn != 0);
+ Assert(b == Equality);
+ b_sgn = -a_sgn;
}
+ Assert(a_sgn != 0);
+ Assert(b_sgn != 0);
+
+ Debug("arith::unateFarkasSigns") << "Constraint::unateFarkasSigns("<<a <<", " << b << ") -> "
+ << "("<<a_sgn<<", "<< b_sgn <<")"<< endl;
+ return make_pair(a_sgn, b_sgn);
}
}/* CVC4::theory::arith namespace */
** the TheoryEngine.
**
** In addition, Constraints keep track of the following:
- ** - A Constrain that is the negation of the Constraint.
+ ** - A Constraint that is the negation of the Constraint.
** - An iterator into a set of Constraints for the ArithVar sorted by
** DeltaRational value.
** - A context dependent internal proof of the node that can be used for
** Internals:
** - Constraints are pointers to ConstraintValues.
** - Undefined Constraints are NullConstraint.
+
+ **
+ ** Assumption vs. Assertion:
+ ** - An assertion is anything on the theory d_fact queue.
+ **   This includes anything propagated and returned to the fact queue.
+ **   These can be used in external conflicts and propagations of earlier proofs.
+ ** - An assumption is anything on the theory d_fact queue that has no further
+ **   explanation, i.e. this theory did not propagate it.
+ ** - To set something as an assumption, first set it as an assertion.
+ ** - Internal assumptions have no explanations and must be regressed out of the proof.
**/
#include "cvc4_private.h"
#define __CVC4__THEORY__ARITH__CONSTRAINT_H
#include "expr/node.h"
+#include "proof/proof.h"
#include "context/context.h"
#include "context/cdlist.h"
namespace theory {
namespace arith {
+/**
+ * Logs the types of different proofs.
+ * Current proof types:
+ * - NoAP : This constraint is not known to be true.
+ * - AssumeAP : This is an input assertion. There is no proof.
+ * : Something can be both asserted and have a proof.
+ * - InternalAssumeAP : An internal assumption. This has no guarantee of having an external proof.
+ * : This must be removed by regression.
+ * - FarkasAP : A proof with Farkas coefficients, i.e.
+ * : \sum lambda_i ( asNode(x_i) <= c_i ) |= 0 < 0
+ * : If proofs are on, coefficients will be logged.
+ * : If proofs are off, coefficients will not be logged.
+ * : A unate implication is a FarkasAP.
+ * - TrichotomyAP : This is any entailment using (x <= a and x >= a) => x = a
+ * : Equivalently, (x > a or x < a or x = a)
+ * : There are 3 candidate ways this can propagate:
+ * : !(x > a) and !(x = a) => x < a
+ * : !(x < a) and !(x = a) => x > a
+ * : !(x > a) and !(x < a) => x = a
+ * - EqualityEngineAP : This is propagated by the equality engine.
+ * : Consult this for the proof.
+ * - IntHoleAP : This is currently a catch-all for all integer specific reasons.
+ */
+enum ArithProofType
+ { NoAP,
+ AssumeAP,
+ InternalAssumeAP,
+ FarkasAP,
+ TrichotomyAP,
+ EqualityEngineAP,
+ IntHoleAP};
+
/**
* The types of constraints.
* The convex constraints are the constraints are LowerBound, Equality,
typedef __gnu_cxx::hash_map<Node, ConstraintP, NodeHashFunction> NodetoConstraintMap;
-typedef size_t ProofId;
-static ProofId ProofIdSentinel = std::numeric_limits<ProofId>::max();
+typedef size_t ConstraintRuleID;
+static const ConstraintRuleID ConstraintRuleIdSentinel = std::numeric_limits<ConstraintRuleID>::max();
+
+typedef size_t AntecedentId;
+static const AntecedentId AntecedentIdSentinel = std::numeric_limits<AntecedentId>::max();
+
typedef size_t AssertionOrder;
-static AssertionOrder AssertionOrderSentinel = std::numeric_limits<AssertionOrder>::max();
+static const AssertionOrder AssertionOrderSentinel = std::numeric_limits<AssertionOrder>::max();
+
/**
* A ValueCollection binds together convex constraints that have the same
}
};
-class Constraint_ {
+
+/**
+ * If proofs are on, there is a vector of rationals for farkas coefficients.
+ * This is the owner of the memory for the vector, and calls delete upon cleanup.
+ *
+ */
+struct ConstraintRule {
+ ConstraintP d_constraint;
+ ArithProofType d_proofType;
+ AntecedentId d_antecedentEnd;
+
+ /**
+ * In this comment, we abbreviate ConstraintDatabase::d_antecedents
+ * and d_farkasCoefficients as ans and fc.
+ *
+ * This list is always empty if proofs are not enabled.
+ *
+ * If proofs are enabled, the proof of constraint c at p in ans[p] of length n is
+ * (NullConstraint, ans[p-(n-1)], ... , ans[p-1], ans[p])
+ *
+ * Farkas' proofs show a contradiction with the negation of c, c_not = c->getNegation().
+ *
+   * We treat the position for NullConstraint (p-n) as the position for the farkas
+   * coefficient for c_not, so we pretend c_not is ans[p-n].
+   * This gives the correlation for the constraints we are going to use:
+ * (c_not, ans[p-(n-1)], ... , ans[p-1], ans[p])
+ * With the coefficients at positions:
+   *    (fc[0], fc[1], ... , fc[n])
+ *
+   * The indices of the constraints in the proof are {i | 0 <= i <= n} (with c_not being at p-n).
+ * Partition the indices into L, U, and E, the lower bounds, the upper bounds and equalities.
+ *
+ * We standardize the proofs to be upper bound oriented following the convention:
+ * A x <= b
+ * with the proof witness of the form
+ * (lambda) Ax <= (lambda) b and lambda >= 0.
+ *
+ * To accomplish this cleanly, the fc coefficients must be negative for lower bounds.
+ * The signs of equalities can be either positive or negative.
+ *
+ * Thus the proof corresponds to (with multiplication over inequalities):
+ * \sum_{u in U} fc[u] ans[p-n+u] + \sum_{e in E} fc[e] ans[p-n+e]
+ * + \sum_{l in L} fc[l] ans[p-n+l]
+ * |= 0 < 0
+ * where fc[u] > 0, fc[l] < 0, and fc[e] != 0 (i.e. it can be either +/-).
+ *
+ * There is no requirement that the proof is minimal.
+ * We do however use all of the constraints by requiring non-zero coefficients.
+ */
+#ifdef CVC4_PROOF
+ RationalVectorCP d_farkasCoefficients;
+#endif
+ ConstraintRule()
+ : d_constraint(NullConstraint)
+ , d_proofType(NoAP)
+ , d_antecedentEnd(AntecedentIdSentinel)
+ {
+#ifdef CVC4_PROOF
+ d_farkasCoefficients = RationalVectorCPSentinel;
+#endif
+ }
+
+ ConstraintRule(ConstraintP con, ArithProofType pt)
+ : d_constraint(con)
+ , d_proofType(pt)
+ , d_antecedentEnd(AntecedentIdSentinel)
+ {
+#ifdef CVC4_PROOF
+ d_farkasCoefficients = RationalVectorCPSentinel;
+#endif
+ }
+ ConstraintRule(ConstraintP con, ArithProofType pt, AntecedentId antecedentEnd)
+ : d_constraint(con)
+ , d_proofType(pt)
+ , d_antecedentEnd(antecedentEnd)
+ {
+#ifdef CVC4_PROOF
+ d_farkasCoefficients = RationalVectorCPSentinel;
+#endif
+ }
+
+ ConstraintRule(ConstraintP con, ArithProofType pt, AntecedentId antecedentEnd, RationalVectorCP coeffs)
+ : d_constraint(con)
+ , d_proofType(pt)
+ , d_antecedentEnd(antecedentEnd)
+ {
+ Assert(PROOF_ON() || coeffs == RationalVectorCPSentinel);
+#ifdef CVC4_PROOF
+ d_farkasCoefficients = coeffs;
+#endif
+ }
+
+ void print(std::ostream& out) const;
+ void debugPrint() const;
+}; /* class ConstraintRule */
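A worked instance of the layout above (illustrative only, not part of the patch): suppose c is x <= 2 and its antecedents are ans[p-1] : x + y <= 5 and ans[p] : y >= 3, so n = 2 and c_not = c->getNegation() is x > 2. Following the sign convention, the stored coefficients are fc = (-1, 1, -1): the lower bounds x > 2 and y >= 3 receive negative coefficients and the upper bound x + y <= 5 a positive one. Summing (-1)*(x > 2) + 1*(x + y <= 5) + (-1)*(y >= 3) gives 0 < 0, the required contradiction with c_not.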
+
+class Constraint {
private:
/** The ArithVar associated with the constraint. */
const ArithVar d_variable;
const DeltaRational d_value;
/** A pointer to the associated database for the Constraint. */
- ConstraintDatabase * d_database;
+ ConstraintDatabase* d_database;
/**
* The node to be communicated with the TheoryEngine.
TNode d_witness;
/**
- * This points at the proof for the constraint in the current context.
+ * The index of this constraint's ConstraintRule in the database's list of constraint rules.
*
* Sat Context Dependent.
- * This is initially ProofIdSentinel.
+ * This is initially ConstraintRuleIdSentinel.
*/
- ProofId d_proof;
+ ConstraintRuleID d_crid;
+
/**
* True if the equality has been split.
* Because of circular dependencies a Constraint is not fully valid until
* initialize has been called on it.
*/
- Constraint_(ArithVar x, ConstraintType t, const DeltaRational& v);
+ Constraint(ArithVar x, ConstraintType t, const DeltaRational& v);
/**
* Destructor for a constraint.
* This should only be called if safeToGarbageCollect() is true.
*/
- ~Constraint_();
+ ~Constraint();
+ /** Returns true if the constraint has been initialized. */
bool initialized() const;
/**
*/
void initialize(ConstraintDatabase* db, SortedConstraintMapIterator v, ConstraintP negation);
- class ProofCleanup {
+
+ class ConstraintRuleCleanup {
public:
- inline void operator()(ConstraintP* p){
- ConstraintP constraint = *p;
- Assert(constraint->d_proof != ProofIdSentinel);
- constraint->d_proof = ProofIdSentinel;
+ inline void operator()(ConstraintRule* crp){
+ Assert(crp != NULL);
+ ConstraintP constraint = crp->d_constraint;
+ Assert(constraint->d_crid != ConstraintRuleIdSentinel);
+ constraint->d_crid = ConstraintRuleIdSentinel;
+
+ PROOF(if(crp->d_farkasCoefficients != RationalVectorCPSentinel){
+ delete crp->d_farkasCoefficients;
+ });
}
};
}
};
- /** Returns true if the node is safe to garbage collect. */
+ /**
+ * Returns true if the node is safe to garbage collect.
+ * Both it and its negation must have no context dependent data set.
+ */
bool safeToGarbageCollect() const;
+ /**
+ * Returns true if the constraint has any context dependent data set.
+ */
+ bool contextDependentDataIsSet() const;
+
/**
* Returns true if the node correctly corresponds to the constraint that is
* being set.
/** Returns a reference to the map for d_variable. */
SortedConstraintMap& constraintSet() const;
+ /** Returns the signs of the Farkas coefficients used to cancel a and b in a unate proof. */
+ static std::pair<int, int> unateFarkasSigns(ConstraintCP a, ConstraintCP b);
+
+
public:
- ConstraintType getType() const {
+ inline ConstraintType getType() const {
return d_type;
}
- ArithVar getVariable() const {
+ inline ArithVar getVariable() const {
return d_variable;
}
return d_value;
}
- ConstraintP getNegation() const {
+ inline ConstraintP getNegation() const {
return d_negation;
}
return d_assertionOrder < time;
}
- /** Sets the witness literal for a node being on the assertion stack.
- * The negation of the node cannot be true. */
- void setAssertedToTheTheory(TNode witness);
-
- /** Sets the witness literal for a node being on the assertion stack.
- * The negation of the node must be true!
- * This is for conflict generation specificially! */
- void setAssertedToTheTheoryWithNegationTrue(TNode witness);
+ /**
+ * Sets the witness literal for a node being on the assertion stack.
+ *
+ * If the negation of the node has a proof, inConflict must be true.
+ * If the negation of the node has no proof, inConflict must be false.
+ * Hence, negationHasProof() == inConflict.
+ *
+ * This replaces:
+ * void setAssertedToTheTheory(TNode witness);
+ * void setAssertedToTheTheoryWithNegationTrue(TNode witness);
+ */
+ void setAssertedToTheTheory(TNode witness, bool inConflict);
bool hasLiteral() const {
return !d_literal.isNull();
}
/**
- * Set the node as selfExplaining().
+ * Set the node as having a proof and being an assumption.
* The node must be assertedToTheTheory().
+ *
+ * Precondition: negationHasProof() == inConflict.
+ *
+ * Replaces:
+ * selfExplaining().
+ * selfExplainingWithNegationTrue().
*/
- void selfExplaining();
-
- void selfExplainingWithNegationTrue();
+ void setAssumption(bool inConflict);
- /** Returns true if the node is selfExplaining.*/
- bool isSelfExplaining() const;
+ /** Returns true if the node is an assumption.*/
+ bool isAssumption() const;
- /**
- * Set the constraint to be a EqualityEngine proof.
- */
+ /** Set the constraint to have an EqualityEngine proof. */
void setEqualityEngineProof();
bool hasEqualityEngineProof() const;
+ /** Returns true if the node has a Farkas' proof. */
+ bool hasFarkasProof() const;
+
+ /** Returns true if the node has an int hole proof. */
+ bool hasIntHoleProof() const;
+
+ /** Returns true if the node has a trichotomy proof. */
+ bool hasTrichotomyProof() const;
/**
- * A sets the constraint to be an internal decision.
+ * Sets the constraint to be an internal assumption.
*
* This does not need to have a witness or an associated literal.
* This is always itself in the explanation fringe for both conflicts
* This cannot be converted back into a Node conflict or explanation.
*
* This cannot have a proof or be asserted to the theory!
+ *
*/
- void setInternalDecision();
- bool isInternalDecision() const;
+ void setInternalAssumption(bool inConflict);
+ bool isInternalAssumption() const;
/**
* Returns a explanation of the constraint that is appropriate for conflicts.
/* Equivalent to calling externalExplainByAssertions on all constraints in b */
static Node externalExplainByAssertions(const ConstraintCPVec& b);
- /* utilities for calling externalExplainByAssertions on 2 constraints */
static Node externalExplainByAssertions(ConstraintCP a, ConstraintCP b);
static Node externalExplainByAssertions(ConstraintCP a, ConstraintCP b, ConstraintCP c);
- //static Node externalExplainByAssertions(ConstraintCP a);
/**
* This is the minimum fringe of the implication tree s.t. every constraint is
static void assertionFringe(ConstraintCPVec& v);
static void assertionFringe(ConstraintCPVec& out, const ConstraintCPVec& in);
- /** Utility function built from explainForConflict. */
- //static Node explainConflict(ConstraintP a, ConstraintP b);
- //static Node explainConflict(ConstraintP a, ConstraintP b, Constraint c);
-
- //static Node explainConflictForEE(ConstraintCP a, ConstraintCP b);
- //static Node explainConflictForEE(ConstraintCP a);
- //static Node explainConflictForDio(ConstraintCP a);
- //static Node explainConflictForDio(ConstraintCP a, ConstraintCP b);
-
+ /** Returns true if the constraint can be a leaf on the fringe of a Farkas' proof. */
bool onFringe() const {
- return assertedToTheTheory() || isInternalDecision() || hasEqualityEngineProof();
+ return assertedToTheTheory() || isInternalAssumption() || hasEqualityEngineProof();
}
/**
* Returns an explanation of a propagation by the ConstraintDatabase.
* The constraint must have a proof.
- * The constraint cannot be selfExplaining().
+ * The constraint cannot be an assumption.
*
* This is the minimum fringe of the implication tree (excluding the constraint itself)
* s.t. every constraint is assertedToTheTheory() or hasEqualityEngineProof().
*/
Node externalExplainForPropagation() const {
Assert(hasProof());
- Assert(!isSelfExplaining());
+ Assert(!isAssumption());
+ Assert(!isInternalAssumption());
return externalExplain(d_assertionOrder);
}
- // void externalExplainForPropagation(NodeBuilder<>& nb) const{
- // Assert(hasProof());
- // Assert(!isSelfExplaining());
- // externalExplain(nb, d_assertionOrder);
- // }
+ /**
+ * Explain the constraint and its negation in terms of assertions.
+ * The constraint must be in conflict.
+ */
+ Node externalExplainConflict() const;
private:
Node externalExplain(AssertionOrder order) const;
static Node externalExplain(const ConstraintCPVec& b, AssertionOrder order);
public:
- bool hasProof() const {
- return d_proof != ProofIdSentinel;
+
+ /** The constraint is known to be true. */
+ inline bool hasProof() const {
+ return d_crid != ConstraintRuleIdSentinel;
}
- bool negationHasProof() const {
+
+ /** The negation of the constraint is known to hold. */
+ inline bool negationHasProof() const {
return d_negation->hasProof();
}
- /* Neither the contraint has a proof nor the negation has a proof.*/
+ /** Neither the constraint nor its negation has a proof. */
bool truthIsUnknown() const {
return !hasProof() && !negationHasProof();
}
- /* This is a synonym for hasProof(). */
- bool isTrue() const {
+ /** This is a synonym for hasProof(). */
+ inline bool isTrue() const {
return hasProof();
}
+ /** Both the constraint and its negation are true. */
+ inline bool inConflict() const {
+ return hasProof() && negationHasProof();
+ }
+
/**
* Returns the constraint that corresponds to taking
* x r ceiling(getValue()) where r is the node's getType().
ConstraintP getStrictlyWeakerLowerBound(bool hasLiteral, bool mustBeAsserted) const;
/**
- * Marks the node as having a proof a.
- * Adds the node the database's propagation queue.
+ * Marks this constraint, c, as being entailed by a.
+ * The Farkas proof is 1*(a) + (-1)*(c) |= 0 < 0.
+ *
+ * After calling impliedByUnate(), the caller should either raise a conflict
+ * or call tryToPropagate().
+ */
+ void impliedByUnate(ConstraintCP a, bool inConflict);
+
+ /**
+ * Marks this constraint, c, as being entailed by a.
+ * The reason has to do with integer rounding.
+ *
+ * After calling impliedByIntHole(), the caller should either raise a conflict
+ * or call tryToPropagate().
+ */
+ void impliedByIntHole(ConstraintCP a, bool inConflict);
+
+ /**
+ * Marks this constraint, c, as being entailed by the set of constraints b.
+ * The reason has to do with integer rounding.
+ *
+ * After calling impliedByIntHole(), the caller should either raise a conflict
+ * or call tryToPropagate().
+ */
+ void impliedByIntHole(const ConstraintCPVec& b, bool inConflict);
+
+ /**
+ * This is a lemma of the form:
+ * x < d or x = d or x > d
+ * The current constraint c is one of the above constraints and {a,b}
+ * are the negation of the other two constraints.
*
* Preconditions:
- * canBePropagated()
- * !assertedToTheTheory()
+ * - negationHasProof() == inConflict.
+ *
+ * After calling impliedByTrichotomy(), the caller should either raise a conflict
+ * or call tryToPropagate().
*/
- void propagate(ConstraintCP a);
- void propagate(ConstraintCP a, ConstraintCP b);
- //void propagate(const std::vector<Constraint>& b);
- void propagate(const ConstraintCPVec& b);
+ void impliedByTrichotomy(ConstraintCP a, ConstraintCP b, bool inConflict);
/**
- * The only restriction is that this is not known be true.
- * This propagates if there is a node.
+ * Marks the node as having a Farkas proof.
+ *
+ * Preconditions:
+ * - coeffs == NULL if proofs are off.
+ * - See the comments for ConstraintRule for the form of coeffs when
+ * proofs are on.
+ * - negationHasProof() == inConflict.
+ *
+ * After calling impliedByFarkas(), the caller should either raise a conflict
+ * or call tryToPropagate().
*/
- void impliedBy(ConstraintCP a);
- void impliedBy(ConstraintCP a, ConstraintCP b);
- //void impliedBy(const std::vector<Constraint>& b);
- void impliedBy(const ConstraintCPVec& b);
+ void impliedByFarkas(const ConstraintCPVec& b, RationalVectorCP coeffs, bool inConflict);
+ /**
+ * Generates an implication node, B => getLiteral(),
+ * where B is the result of externalExplainByAssertions(b).
+ * Does not guarantee b is the explanation of the constraint.
+ */
Node externalImplication(const ConstraintCPVec& b) const;
- static Node externalConjunction(const ConstraintCPVec& b);
- //static Node makeConflictNode(const ConstraintCPVec& b);
-
- /** The node must have a proof already and be eligible for propagation! */
- void propagate();
+ /**
+ * Returns true if, were the variable assigned the value dr,
+ * the constraint would be satisfied.
+ */
bool satisfiedBy(const DeltaRational& dr) const;
-private:
/**
- * Marks the node as having a proof and being selfExplaining.
- * Neither the node nor its negation can have a proof.
- * This is internal!
+ * The node must have a proof already and be eligible for propagation!
+ * You probably want to call tryToPropagate() instead.
+ *
+ * Preconditions:
+ * - hasProof()
+ * - canBePropagated()
+ * - !assertedToTheTheory()
*/
- void markAsTrue();
+ void propagate();
+
/**
- * Marks the node as having a proof a.
- * This is safe if the node does not have
+ * If the constraint
+ * canBePropagated() and
+ * !assertedToTheTheory(),
+ * the constraint is added to the database's propagation queue.
+ *
+ * Precondition:
+ * - hasProof()
*/
- void markAsTrue(ConstraintCP a);
+ void tryToPropagate();
+
+ /**
+ * Returns a reference to the containing database.
+ * Precondition: the constraint must be initialized.
+ */
+ const ConstraintDatabase& getDatabase() const;
+
+private:
+
+ /** Returns the ConstraintRule for this constraint (looked up via d_crid). */
+ const ConstraintRule& getConstraintRule() const;
+
+ inline ArithProofType getProofType() const {
+ return getConstraintRule().d_proofType;
+ }
- void markAsTrue(ConstraintCP a, ConstraintCP b);
- //void markAsTrue(const std::vector<Constraint>& b);
- void markAsTrue(const ConstraintCPVec& b);
+ inline AntecedentId getEndAntecedent() const {
+ return getConstraintRule().d_antecedentEnd;
+ }
+ inline RationalVectorCP getFarkasCoefficients() const {
+ return NULLPROOF(getConstraintRule().d_farkasCoefficients);
+ }
+
void debugPrint() const;
/**
* isSelfExplaining() or
* hasEqualityEngineProof()
*/
- bool proofIsEmpty() const;
+ bool antecentListIsEmpty() const;
+
+ bool antecedentListLengthIsOne() const;
+
+ /** Return true if every element in b has a proof. */
+ static bool allHaveProof(const ConstraintCPVec& b);
+ /** Precondition: hasFarkasProof() */
+ bool wellFormedFarkasProof() const;
+
}; /* class Constraint */
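For orientation, the calling pattern the comments above prescribe for the impliedByX functions, sketched as a free-standing helper. This is illustrative only and not part of the patch; markImpliedByFarkas and conflictChannel are hypothetical names, while Constraint, RaiseConflict, and the member functions are the ones declared in this commit (the sketch assumes the surrounding CVC4 arith headers and is not meant to compile standalone).

    // Sketch: mark c as proved by a Farkas combination of `antecedents` with
    // coefficients `coeffs`, then either raise a conflict or try to propagate.
    // Per the preconditions above, coeffs must be RationalVectorCPSentinel when
    // proofs are off, and negationHasProof() must agree with inConflict.
    void markImpliedByFarkas(ConstraintP c,
                             const ConstraintCPVec& antecedents,
                             RationalVectorCP coeffs,
                             const RaiseConflict& conflictChannel) {
      bool inConflict = c->negationHasProof();
      c->impliedByFarkas(antecedents, coeffs, inConflict);
      if (inConflict) {
        conflictChannel.raiseConflict(c);  // c and its negation now both have proofs
      } else {
        c->tryToPropagate();
      }
    }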
-std::ostream& operator<<(std::ostream& o, const Constraint_& c);
+std::ostream& operator<<(std::ostream& o, const Constraint& c);
std::ostream& operator<<(std::ostream& o, const ConstraintP c);
+std::ostream& operator<<(std::ostream& o, const ConstraintCP c);
std::ostream& operator<<(std::ostream& o, const ConstraintType t);
std::ostream& operator<<(std::ostream& o, const ValueCollection& c);
std::ostream& operator<<(std::ostream& o, const ConstraintCPVec& v);
+std::ostream& operator<<(std::ostream& o, const ArithProofType);
class ConstraintDatabase {
context::CDQueue<ConstraintCP> d_toPropagate;
/**
- * Proof Lists.
- * Proofs are lists of valid constraints terminated by the first smaller
+ * Proofs are lists of valid constraints terminated by the first null
* sentinel value in the proof list.
- * The proof at p in d_proofs[p] of length n is
- * (NullConstraint, d_proofs[p-(n-1)], ... , d_proofs[p-1], d_proofs[p])
+ * We abbreviate d_antecedents as ans in the comment.
+ *
+ * The proof at p in ans[p] of length n is
+ * (NullConstraint, ans[p-(n-1)], ... , ans[p-1], ans[p])
+ *
* The proof at p corresponds to the conjunction:
* (and x_i)
*
* So the proof of a Constraint c corresponds to the horn clause:
* (implies (and x_i) c)
- * where (and x_i) is the proof at c.d_proofs.
+ * where (and x_i) is the antecedent list ending at the d_antecedentEnd of the ConstraintRule at c.d_crid.
*
- * Constraints are pointers so this list is designed not to require any
- * destruction.
+ * Constraints are pointers so this list is designed not to require any destruction.
*/
- CDConstraintList d_proofs;
+ CDConstraintList d_antecedents;
- /**
- * This is a special proof for marking that nodes are their own explanation
- * from the perspective of the theory.
- * These must always be asserted to the theory.
- *
- * This proof is always a member of the list.
- */
- ProofId d_selfExplainingProof;
-
- /**
- * Marks a node as being proved by the equality engine.
- * The equality engine will be asked for the explanation of such nodes.
- *
- * This is a special proof that is always a member of the list.
- */
- ProofId d_equalityEngineProof;
+ typedef context::CDList<ConstraintRule, Constraint::ConstraintRuleCleanup> ConstraintRuleList;
+ typedef context::CDList<ConstraintP, Constraint::CanBePropagatedCleanup> CBPList;
+ typedef context::CDList<ConstraintP, Constraint::AssertionOrderCleanup> AOList;
+ typedef context::CDList<ConstraintP, Constraint::SplitCleanup> SplitList;
- /**
- * Marks a constraint as being proved by making an internal
- * decision. Such nodes cannot be used in external explanations
- * but can be used internally.
- */
- ProofId d_internalDecisionProof;
- typedef context::CDList<ConstraintP, Constraint_::ProofCleanup> ProofCleanupList;
- typedef context::CDList<ConstraintP, Constraint_::CanBePropagatedCleanup> CBPList;
- typedef context::CDList<ConstraintP, Constraint_::AssertionOrderCleanup> AOList;
- typedef context::CDList<ConstraintP, Constraint_::SplitCleanup> SplitList;
/**
* The watch lists are collected together as they need to be garbage collected
* carefully.
*/
struct Watches{
+
/**
- * Contains the exact list of atoms that have a proof.
+ * Contains the exact list of constraints that have a proof.
+ * Upon pop, this resets the constraint's d_crid to ConstraintRuleIdSentinel.
+ *
+ * The index in this list is the proper ordering of the proofs.
*/
- ProofCleanupList d_proofWatches;
-
+ ConstraintRuleList d_constraintProofs;
+
+
/**
* Contains the exact list of constraints that can be used for propagation.
*/
void pushSplitWatch(ConstraintP c);
void pushCanBePropagatedWatch(ConstraintP c);
void pushAssertionOrderWatch(ConstraintP c, TNode witness);
- void pushProofWatch(ConstraintP c, ProofId pid);
+
+ /** Assumes that antecedents have already been pushed. */
+ void pushConstraintRule(const ConstraintRule& crp);
/** Returns true if all of the entries of the vector are empty. */
static bool emptyDatabase(const std::vector<PerVariableDatabase>& vec);
ArithCongruenceManager& d_congruenceManager;
const context::Context * const d_satContext;
- const int d_satAllocationLevel;
RaiseConflict d_raiseConflict;
- friend class Constraint_;
+ const Rational d_one;
+ const Rational d_negOne;
+
+ friend class Constraint;
+
public:
ConstraintDatabase( context::Context* satContext,
void unatePropUpperBound(ConstraintP curr, ConstraintP prev);
void unatePropEquality(ConstraintP curr, ConstraintP prevLB, ConstraintP prevUB);
+ /** The AntecedentId must be in range. */
+ ConstraintCP getAntecedent(AntecedentId p) const;
+
private:
- void raiseUnateConflict(ConstraintP ant, ConstraintP cons);
+ /** Returns true if cons is now in conflict. */
+ bool handleUnateProp(ConstraintP ant, ConstraintP cons);
DenseSet d_reclaimable;
namespace theory {
namespace arith {
-class Constraint_;
-typedef Constraint_* ConstraintP;
-typedef const Constraint_* ConstraintCP;
+class Constraint;
+typedef Constraint* ConstraintP;
+typedef const Constraint* ConstraintCP;
-const ConstraintP NullConstraint = NULL;
+static const ConstraintP NullConstraint = NULL;
class ConstraintDatabase;
typedef std::vector<ConstraintCP> ConstraintCPVec;
+typedef std::vector<Rational> RationalVector;
+typedef RationalVector* RationalVectorP;
+typedef const RationalVector* RationalVectorCP;
+static const RationalVectorCP RationalVectorCPSentinel = NULL;
+static const RationalVectorP RationalVectorPSentinel = NULL;
+
}/* CVC4::theory::arith namespace */
}/* CVC4::theory namespace */
}/* CVC4 namespace */
return;
}
+
+
uint32_t length = sp.maxLength();
if(length > d_maxInputCoefficientLength){
d_maxInputCoefficientLength = length;
//Variable proofVariable(makeIntegerVariable());
TrailIndex posInTrail = d_trail.size();
- d_trail.push_back(Constraint(sp,Polynomial(Monomial(VarList(proofVariable)))));
+ Debug("dio::pushInputConstraint") << "pushInputConstraint @ " << posInTrail
+ << " " << eq.getNode()
+ << " " << reason << endl;
+ d_trail.push_back(Constraint(sp,Polynomial::mkPolynomial(proofVariable)));
size_t posInConstraintList = d_inputConstraints.size();
d_inputConstraints.push_back(InputConstraint(reason, posInTrail));
d_basicVariableUpdates(f),
d_increasing(1),
d_decreasing(-1),
+ d_upperBoundDifference(),
+ d_lowerBoundDifference(),
+ d_one(1),
+ d_negOne(-1),
d_btracking(boundsTracking),
d_areTracking(false),
d_trackCallback(this)
RowIndex ridx = d_tableau.basicToRowIndex(basic);
ConstraintCPVec bounds;
- propagateRow(bounds, ridx, upperBound, c);
- c->impliedBy(bounds);
+ RationalVectorP coeffs = NULLPROOF(new RationalVector());
+ propagateRow(bounds, ridx, upperBound, c, coeffs);
+ c->impliedByFarkas(bounds, coeffs, false);
+ c->tryToPropagate();
+
+ if(coeffs != RationalVectorPSentinel) { delete coeffs; }
}
-void LinearEqualityModule::propagateRow(ConstraintCPVec& into, RowIndex ridx, bool rowUp, ConstraintP c){
+/* An explanation of the farkas coefficients.
+ *
+ * We are proving c using the other variables on the row.
+ * The proof is in terms of the other constraints and the negation of c, ~c.
+ *
+ * A row has the form:
+ * sum a_i * x_i = 0
+ * or
+ * sx + sum r y + sum q z = 0
+ * where r > 0 and q < 0.
+ *
+ * If rowUp, we are proving c
+ * g = sum r u_y + sum q l_z
+ * and c is entailed by -sx <= g
+ * If !rowUp, we are proving c
+ * g = sum r l_y + sum q u_z
+ * and c is entailed by -sx >= g
+ *
+ * | s | c | ~c | u_i | l_i
+ * if rowUp | s > 0 | x >= -g/s | x < -g/s | a_i > 0 | a_i < 0
+ * if rowUp | s < 0 | x <= -g/s | x > -g/s | a_i > 0 | a_i < 0
+ * if !rowUp | s > 0 | x <= -g/s | x > -g/s | a_i < 0 | a_i > 0
+ * if !rowUp | s < 0 | x >= -g/s | x < -g/s | a_i < 0 | a_i > 0
+ *
+ *
+ * Thus we treat !rowUp as multiplying the row by -1 and rowUp as 1
+ * for the entire row.
+ */
+void LinearEqualityModule::propagateRow(ConstraintCPVec& into, RowIndex ridx, bool rowUp, ConstraintP c, RationalVectorP farkas){
Assert(!c->assertedToTheTheory());
Assert(c->canBePropagated());
Assert(!c->hasProof());
+ if(farkas != RationalVectorPSentinel){
+ Assert(farkas->empty());
+ farkas->push_back(Rational(0));
+ }
+
ArithVar v = c->getVariable();
- Debug("arith::explainNonbasics") << "LinearEqualityModule::explainNonbasics("
- << v <<") start" << endl;
+ Debug("arith::propagateRow") << "LinearEqualityModule::propagateRow("
+ << ridx << ", " << rowUp << ", " << v << ") start" << endl;
+
+ const Rational& multiple = rowUp ? d_one : d_negOne;
+ Debug("arith::propagateRow") << "multiple: " << multiple << endl;
+
Tableau::RowIterator iter = d_tableau.ridRowIterator(ridx);
for(; !iter.atEnd(); ++iter){
const Tableau::Entry& entry = *iter;
ArithVar nonbasic = entry.getColVar();
- if(nonbasic == v){ continue; }
-
const Rational& a_ij = entry.getCoefficient();
-
int sgn = a_ij.sgn();
Assert(sgn != 0);
bool selectUb = rowUp ? (sgn > 0) : (sgn < 0);
- ConstraintCP bound = selectUb
- ? d_variables.getUpperBoundConstraint(nonbasic)
- : d_variables.getLowerBoundConstraint(nonbasic);
- Assert(bound != NullConstraint);
- Debug("arith::explainNonbasics") << "explainNonbasics" << bound << " for " << c << endl;
- into.push_back(bound);
+ Assert( nonbasic != v ||
+ ( rowUp && a_ij.sgn() > 0 && c->isLowerBound()) ||
+ ( rowUp && a_ij.sgn() < 0 && c->isUpperBound()) ||
+ (!rowUp && a_ij.sgn() > 0 && c->isUpperBound()) ||
+ (!rowUp && a_ij.sgn() < 0 && c->isLowerBound())
+ );
+
+ if(Debug.isOn("arith::propagateRow")){
+ if(nonbasic == v){
+ Debug("arith::propagateRow") << "(target) "
+ << rowUp << " "
+ << a_ij.sgn() << " "
+ << c->isLowerBound() << " "
+ << c->isUpperBound() << endl;
+
+ Debug("arith::propagateRow") << "(target) ";
+ }
+ Debug("arith::propagateRow") << "propagateRow " << a_ij << " * " << nonbasic ;
+ }
+
+ if(nonbasic == v){
+ if(farkas != RationalVectorPSentinel){
+ Assert(farkas->front().isZero());
+ Rational multAij = multiple * a_ij;
+ Debug("arith::propagateRow") << "("<<multAij<<") ";
+ farkas->front() = multAij;
+ }
+
+ Debug("arith::propagateRow") << c << endl;
+ }else{
+
+ ConstraintCP bound = selectUb
+ ? d_variables.getUpperBoundConstraint(nonbasic)
+ : d_variables.getLowerBoundConstraint(nonbasic);
+
+ if(farkas != RationalVectorPSentinel){
+ Rational multAij = multiple * a_ij;
+ Debug("arith::propagateRow") << "("<<multAij<<") ";
+ farkas->push_back(multAij);
+ }
+ Assert(bound != NullConstraint);
+ Debug("arith::propagateRow") << bound << endl;
+ into.push_back(bound);
+ }
}
- Debug("arith::explainNonbasics") << "LinearEqualityModule::explainNonbasics("
- << v << ") done" << endl;
+ Debug("arith::propagateRow") << "LinearEqualityModule::propagateRow("
+ << ridx << ", " << rowUp << ", " << v << ") done" << endl;
+
}
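A small concrete instance of the sign bookkeeping above (illustrative only): take the row x - 2y + 3z = 0 with bounds y >= 1 and z <= 2, and suppose a lower bound c on x is propagated with rowUp = true. Then multiple is 1, so farkas->front() becomes 1 (the coefficient of x); the entry for z (coefficient 3 > 0) selects z's upper bound and pushes 3, while the entry for y (coefficient -2 < 0) selects y's lower bound and pushes -2. This matches the ConstraintRule convention that lower-bound antecedents carry negative Farkas coefficients, and the propagated bound is x >= -(3*2 + (-2)*1) = -4.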
ConstraintP LinearEqualityModule::weakestExplanation(bool aboveUpper, DeltaRational& surplus, ArithVar v, const Rational& coeff, bool& anyWeakening, ArithVar basic) const {
anyWeakening = true;
surplus = surplus - diff;
- Debug("weak") << "found:" << endl;
+ Debug("arith::weak") << "found:" << endl;
if(v == basic){
- Debug("weak") << " basic: ";
+ Debug("arith::weak") << " basic: ";
}
- Debug("weak") << " " << surplus << " "<< diff << endl
- << " " << bound << c << endl
- << " " << weakerBound << weaker << endl;
+ Debug("arith::weak") << " " << surplus << " "<< diff << endl
+ << " " << bound << c << endl
+ << " " << weakerBound << weaker << endl;
Assert(diff.sgn() > 0);
c = weaker;
return c;
}
-void LinearEqualityModule::minimallyWeakConflict(bool aboveUpper, ArithVar basicVar, RaiseConflict& rc) const {
+/* An explanation of the farkas coefficients.
+ *
+ * We are proving a conflict on the basic variable x_b.
+ * If aboveUpper, then the conflict is with the constraint c : x_b <= u_b.
+ * If !aboveUpper, then the conflict is with the constraint c : x_b >= l_b.
+ *
+ * A row has the form:
+ * -x_b + sum a_i * x_i = 0
+ * or
+ * -x_b + sum r y + sum q z = 0,
+ * x_b = sum r y + sum q z
+ * where r > 0 and q < 0.
+ *
+ *
+ * If !aboveUp, we are proving ~c: x_b < l_b
+ * g = sum r u_y + sum q l_z
+ * x_b <= g < l_b
+ * and ~c is entailed by x_b <= g
+ *
+ * If aboveUp, we are proving ~c : x_b > u_b
+ * g = sum r l_y + sum q u_z
+ * x_b >= g > u_b
+ * and ~c is entailed by x_b >= g
+ *
+ *
+ * | s | c | ~c | u_i | l_i
+ * if !aboveUp | s > 0 | x >= -g/s | x < -g/s | a_i > 0 | a_i < 0
+ * if !aboveUp | s < 0 | x <= -g/s | x > -g/s | a_i > 0 | a_i < 0
+ * if aboveUp | s > 0 | x <= -g/s | x > -g/s | a_i < 0 | a_i > 0
+ * if aboveUp | s < 0 | x >= -g/s | x < -g/s | a_i < 0 | a_i > 0
+ *
+ * Thus we treat aboveUp as multiplying the row by -1 and !aboveUp as 1
+ * for the entire row.
+ */
+ConstraintCP LinearEqualityModule::minimallyWeakConflict(bool aboveUpper, ArithVar basicVar, FarkasConflictBuilder& fcs) const {
+ Assert(!fcs.underConstruction());
TimerStat::CodeTimer codeTimer(d_statistics.d_weakenTime);
+ Debug("arith::weak") << "LinearEqualityModule::minimallyWeakConflict("
+ << aboveUpper <<", "<< basicVar << ", ...) start" << endl;
+
+ const Rational& adjustSgn = aboveUpper ? d_negOne : d_one;
const DeltaRational& assignment = d_variables.getAssignment(basicVar);
DeltaRational surplus;
if(aboveUpper){
Assert(assignment < d_variables.getLowerBound(basicVar));
surplus = d_variables.getLowerBound(basicVar) - assignment;
}
-
+
bool anyWeakenings = false;
for(Tableau::RowIterator i = d_tableau.basicRowIterator(basicVar); !i.atEnd(); ++i){
const Tableau::Entry& entry = *i;
const Rational& coeff = entry.getCoefficient();
bool weakening = false;
ConstraintP c = weakestExplanation(aboveUpper, surplus, v, coeff, weakening, basicVar);
- Debug("weak") << "weak : " << weakening << " "
- << c->assertedToTheTheory() << " "
- << d_variables.getAssignment(v) << " "
- << c << endl;
+ Debug("arith::weak") << "weak : " << weakening << " "
+ << c->assertedToTheTheory() << " "
+ << d_variables.getAssignment(v) << " "
+ << c << endl;
anyWeakenings = anyWeakenings || weakening;
- rc.addConstraint(c);
+ fcs.addConstraint(c, coeff, adjustSgn);
+ if(basicVar == v){
+ Assert(! c->negationHasProof() );
+ fcs.makeLastConsequent();
+ }
}
+ Assert(fcs.consequentIsSet());
+
+ ConstraintCP conflicted = fcs.commitConflict();
+
++d_statistics.d_weakeningAttempts;
if(anyWeakenings){
++d_statistics.d_weakeningSuccesses;
}
+ Debug("arith::weak") << "LinearEqualityModule::minimallyWeakConflict("
+ << aboveUpper <<", "<< basicVar << ", ...) done" << endl;
+ return conflicted;
}
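The FarkasConflictBuilder protocol used above (and again in generateSOIConflict below) is, in outline: add every constraint of the conflict with its coefficient, mark exactly one constraint whose negation is unproven as the consequent, then commit. The following sketch is illustrative only and not part of the patch; constraints, coefficients, consequentIndex, and conflictChannel are hypothetical names, while the builder calls are the ones introduced by this commit.

    // Sketch of the builder protocol: addConstraint(...) for every member of the
    // conflict, makeLastConsequent() for the chosen consequent, and
    // commitConflict() to obtain a constraint that is now in conflict.
    FarkasConflictBuilder builder;
    for (size_t i = 0; i < constraints.size(); ++i) {
      builder.addConstraint(constraints[i], coefficients[i]);
      if (i == consequentIndex) {
        Assert(!constraints[i]->negationHasProof());
        builder.makeLastConsequent();
      }
    }
    ConstraintCP conflicted = builder.commitConflict();
    conflictChannel.raiseConflict(conflicted);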
ArithVar LinearEqualityModule::minVarOrder(ArithVar x, ArithVar y) const {
typedef bool (LinearEqualityModule::*UpdatePreferenceFunction)(const UpdateInfo&, const UpdateInfo&) const;
+
private:
/**
* Manages information about the assignment and upper and lower bounds on the
Maybe<DeltaRational> d_upperBoundDifference;
Maybe<DeltaRational> d_lowerBoundDifference;
+ Rational d_one;
+ Rational d_negOne;
public:
/**
void propagateBasicFromRow(ConstraintP c);
/**
+ * Let v be the variable for the constraint c.
* Exports either the explanation of an upperbound or a lower bound
- * of the basic variable basic, using the non-basic variables in the row.
+ * of v using the other variables in the row.
+ *
+ * If farkas != RationalVectorPSentinel, this function additionally
+ * stores the Farkas coefficients of the constraints added to into.
+ * Position 0 is the coefficient of v.
+ * Position i > 0 corresponds to the order of the other constraints in into.
*/
- void propagateRow(ConstraintCPVec& into, RowIndex ridx, bool rowUp, ConstraintP c);
+ void propagateRow(ConstraintCPVec& into
+ , RowIndex ridx
+ , bool rowUp
+ , ConstraintP c
+ , RationalVectorP farkas);
/**
* Computes the value of a basic variable using the assignments
public:
/**
* Constructs a minimally weak conflict for the basic variable basicVar.
+ *
+ * Returns a constraint that is now in conflict.
*/
- void minimallyWeakConflict(bool aboveUpper, ArithVar basicVar, RaiseConflict& rc) const;
+ ConstraintCP minimallyWeakConflict(bool aboveUpper, ArithVar basicVar, FarkasConflictBuilder& rc) const;
/**
- * Given a non-basic variable that is know to have a conflict on it,
+ * Given a basic variable that is known to have a conflict on it,
* construct and return a conflict.
* Follows section 4.2 in the CAV06 paper.
*/
- inline void generateConflictAboveUpperBound(ArithVar conflictVar, RaiseConflict& rc) const {
- minimallyWeakConflict(true, conflictVar, rc);
+ inline ConstraintCP generateConflictAboveUpperBound(ArithVar conflictVar, FarkasConflictBuilder& rc) const {
+ return minimallyWeakConflict(true, conflictVar, rc);
}
- inline void generateConflictBelowLowerBound(ArithVar conflictVar, RaiseConflict& rc) const {
- minimallyWeakConflict(false, conflictVar, rc);
+ inline ConstraintCP generateConflictBelowLowerBound(ArithVar conflictVar, FarkasConflictBuilder& rc) const {
+ return minimallyWeakConflict(false, conflictVar, rc);
}
/**
return Monomial(c, vl);
}
}
+
+Monomial Monomial::mkMonomial(const VarList& vl) {
+ // acts like Monomial::mkMonomial( 1, vl)
+ if( vl.empty() ) {
+ return Monomial::mkOne();
+ } else {
+ return Monomial(vl);
+ }
+}
+
Monomial Monomial::parseMonomial(Node n) {
if(n.getKind() == kind::CONST_RATIONAL) {
return Monomial(Constant(n));
return result;
}
+Polynomial Polynomial::exactDivide(const Integer& z) const {
+ Assert(isIntegral());
+ if(z.isOne()){
+ return (*this);
+ }else {
+ Constant invz = Constant::mkConstant(Rational(1,z));
+ Polynomial prod = (*this) * Monomial::mkMonomial(invz);
+ Assert(prod.isIntegral());
+ return prod;
+ }
+}
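For example (illustrative only), exactDivide(3) on the integral polynomial 6*x + 9 multiplies it by the constant monomial 1/3 and returns 2*x + 3, so Assert(prod.isIntegral()) holds; exactDivide(4) on the same polynomial would produce 3/2*x + 9/4 and trip that assertion, since 4 does not divide every coefficient.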
Polynomial Polynomial::sumPolynomials(const std::vector<Polynomial>& ps){
if(ps.empty()){
Constant c = Constant::mkConstant((*ci).second);
Node n = (*ci).first;
VarList vl = VarList::parseVarList(n);
- if(vl.empty()){
- monos.push_back(Monomial(c));
- }else{
- monos.push_back(Monomial(c, vl));
- }
+ monos.push_back(Monomial::mkMonomial(c, vl));
}
}
Monomial::sort(monos);
Constant coeffInv = -(minimalVList.getConstant().inverse());
Polynomial newRight = (p - minimalVList) * coeffInv;
- Polynomial newLeft(minimalVList.getVarList());
+ Polynomial newLeft(Monomial::mkMonomial(minimalVList.getVarList()));
return toNode(kind::EQUAL, newLeft, newRight);
}
Monomial m = varPartMult.selectAbsMinimum();
bool mIsPositive = m.getConstant().isPositive();
- Polynomial noM = (varPartMult + (- m)) + Polynomial(constMult);
+ Polynomial noM = (varPartMult + (- m)) + Polynomial::mkPolynomial(constMult);
// m + noM = 0
Polynomial newRight = mIsPositive ? -noM : noM;
};/* class VarList */
+/** Constructors have side conditions. Use the static mkMonomial functions instead. */
class Monomial : public NodeWrapper {
private:
Constant constant;
n.getNumChildren() == 2;
}
-public:
-
Monomial(const Constant& c):
NodeWrapper(c.getNode()), constant(c), varList(VarList::mkEmptyVarList())
{ }
-
+
Monomial(const VarList& vl):
NodeWrapper(vl.getNode()), constant(Constant::mkConstant(1)), varList(vl)
{
Assert(multStructured(getNode()));
}
-
+public:
static bool isMember(TNode n);
/** Makes a monomial with no restrictions on c and vl. */
static Monomial mkMonomial(const Constant& c, const VarList& vl);
+ /** If vl is empty, this makes the constant monomial one. */
+ static Monomial mkMonomial(const VarList& vl);
+
+ static Monomial mkMonomial(const Constant& c){
+ return Monomial(c);
+ }
+
static Monomial mkMonomial(const Variable& v){
return Monomial(VarList(v));
}
}
const Constant& getConstant() const { return constant; }
const VarList& getVarList() const { return varList; }
-
+
bool isConstant() const {
return varList.empty();
}
Assert( Monomial::isStrictlySorted(m) );
}
+ static Polynomial mkPolynomial(const Constant& c){
+ return Polynomial(Monomial::mkMonomial(c));
+ }
+
static Polynomial mkPolynomial(const Variable& v){
- return Monomial::mkMonomial(v);
+ return Polynomial(Monomial::mkMonomial(v));
}
static Polynomial mkPolynomial(const std::vector<Monomial>& m) {
*/
Integer gcd() const;
- Polynomial exactDivide(const Integer& z) const {
- Assert(isIntegral());
- Constant invz = Constant::mkConstant(Rational(1,z));
- Polynomial prod = (*this) * Monomial(invz);
- Assert(prod.isIntegral());
- return prod;
- }
+ /** z must divide all of the coefficients of the polynomial. */
+ Polynomial exactDivide(const Integer& z) const;
Polynomial operator+(const Polynomial& vl) const;
Polynomial operator-(const Polynomial& vl) const;
#include "theory/arith/options.h"
#include "theory/arith/constraint.h"
+
using namespace std;
namespace CVC4 {
namespace theory {
namespace arith {
+
SimplexDecisionProcedure::SimplexDecisionProcedure(LinearEqualityModule& linEq, ErrorSet& errors, RaiseConflict conflictChannel, TempVarMalloc tvmalloc)
: d_conflictVariables()
, d_linEq(linEq)
, d_errorSet(errors)
, d_numVariables(0)
, d_conflictChannel(conflictChannel)
+ , d_conflictBuilder(NULL)
, d_arithVarMalloc(tvmalloc)
, d_errorSize(0)
, d_zero(0)
+ , d_posOne(1)
+ , d_negOne(-1)
{
d_heuristicRule = options::arithErrorSelectionRule();
d_errorSet.setSelectionRule(d_heuristicRule);
+ d_conflictBuilder = new FarkasConflictBuilder();
}
+SimplexDecisionProcedure::~SimplexDecisionProcedure(){
+ delete d_conflictBuilder;
+}
+
+
bool SimplexDecisionProcedure::standardProcessSignals(TimerStat &timer, IntStat& conflicts) {
TimerStat::CodeTimer codeTimer(timer);
Assert( d_conflictVariables.empty() );
void SimplexDecisionProcedure::reportConflict(ArithVar basic){
Assert(!d_conflictVariables.isMember(basic));
Assert(checkBasicForConflict(basic));
- RaiseConflict rc( d_conflictChannel);
- generateConflictForBasic(basic, rc);
+ ConstraintCP conflicted = generateConflictForBasic(basic);
+ Assert(conflicted != NullConstraint);
+ d_conflictChannel.raiseConflict(conflicted);
- // static bool verbose = false;
- // if(verbose) { Message() << "conflict " << basic << " " << conflict << endl; }
- // Assert(!conflict.isNull());
- //d_conflictChannel(conflict);
- rc.commitConflict();
d_conflictVariables.add(basic);
}
-void SimplexDecisionProcedure::generateConflictForBasic(ArithVar basic, RaiseConflict& rc) const {
+ConstraintCP SimplexDecisionProcedure::generateConflictForBasic(ArithVar basic) const {
Assert(d_tableau.isBasic(basic));
Assert(checkBasicForConflict(basic));
if(d_variables.cmpAssignmentLowerBound(basic) < 0){
Assert(d_linEq.nonbasicsAtUpperBounds(basic));
- return d_linEq.generateConflictBelowLowerBound(basic, rc);
+ return d_linEq.generateConflictBelowLowerBound(basic, *d_conflictBuilder);
}else if(d_variables.cmpAssignmentUpperBound(basic) > 0){
Assert(d_linEq.nonbasicsAtLowerBounds(basic));
- return d_linEq.generateConflictAboveUpperBound(basic, rc);
+ return d_linEq.generateConflictAboveUpperBound(basic, *d_conflictBuilder);
}else{
Unreachable();
+ return NullConstraint;
}
}
bool SimplexDecisionProcedure::maybeGenerateConflictForBasic(ArithVar basic) const {
if(checkBasicForConflict(basic)){
- RaiseConflict rc(d_conflictChannel);
- generateConflictForBasic(basic, rc);
+ ConstraintCP conflicted = generateConflictForBasic(basic);
+ d_conflictChannel.raiseConflict(conflicted);
return true;
}else{
return false;
}
ArithVar SimplexDecisionProcedure::constructInfeasiblityFunction(TimerStat& timer, const ArithVarVec& set){
+ Debug("constructInfeasiblityFunction") << "constructInfeasiblityFunction start" << endl;
+
TimerStat::CodeTimer codeTimer(timer);
Assert(!d_errorSet.focusEmpty());
-
+ Assert(debugIsASet(set));
+
ArithVar inf = requestVariable();
Assert(inf != ARITHVAR_SENTINEL);
Assert(!d_variables.assignmentIsConsistent(e));
int sgn = d_errorSet.getSgn(e);
- coeffs.push_back(Rational(sgn));
+ Assert(sgn == -1 || sgn == 1);
+ const Rational& violatedCoeff = sgn < 0 ? d_negOne : d_posOne;
+ coeffs.push_back(violatedCoeff);
variables.push_back(e);
+
+ Debug("constructInfeasiblityFunction") << violatedCoeff << " " << e << endl;
+
}
d_tableau.addRow(inf, coeffs, variables);
DeltaRational newAssignment = d_linEq.computeRowValue(inf, false);
d_linEq.trackRowIndex(d_tableau.basicToRowIndex(inf));
Debug("constructInfeasiblityFunction") << inf << " " << newAssignment << endl;
-
+ Debug("constructInfeasiblityFunction") << "constructInfeasiblityFunction done" << endl;
return inf;
}
/** This is the call back channel for Simplex to report conflicts. */
RaiseConflict d_conflictChannel;
+ /** The builder Simplex uses to construct Farkas conflicts before reporting them. */
+ FarkasConflictBuilder* d_conflictBuilder;
+
/** Used for requesting d_opt, bound and error variables for primal.*/
TempVarMalloc d_arithVarMalloc;
/** A local copy of 0. */
const Rational d_zero;
+ /** A local copy of 1. */
+ const Rational d_posOne;
+
+ /** A local copy of -1. */
+ const Rational d_negOne;
+
ArithVar constructInfeasiblityFunction(TimerStat& timer);
ArithVar constructInfeasiblityFunction(TimerStat& timer, ArithVar e);
ArithVar constructInfeasiblityFunction(TimerStat& timer, const ArithVarVec& set);
public:
SimplexDecisionProcedure(LinearEqualityModule& linEq, ErrorSet& errors, RaiseConflict conflictChannel, TempVarMalloc tvmalloc);
+ ~SimplexDecisionProcedure();
/**
* Tries to update the assignments of variables such that all of the
* If a basic variable has a conflict on its row,
* this produces a minimized row on the conflict channel.
*/
- void generateConflictForBasic(ArithVar basic, RaiseConflict& rc) const;
+ ConstraintCP generateConflictForBasic(ArithVar basic) const;
/** Gets a fresh variable from TheoryArith. */
}
std::vector< ArithVarVec > SumOfInfeasibilitiesSPD::greedyConflictSubsets(){
+ Debug("arith::greedyConflictSubsets") << "greedyConflictSubsets start" << endl;
+
std::vector< ArithVarVec > subsets;
Assert(d_soiVar == ARITHVAR_SENTINEL);
if(d_errorSize <= 2){
ArithVarVec inError;
d_errorSet.pushFocusInto(inError);
+
+ Assert(debugIsASet(inError));
subsets.push_back(inError);
return subsets;
}
ArithVar e = *iter;
addRowSgns(sgns, e, d_errorSet.getSgn(e));
- //cout << "basic error var: " << e << endl;
- //d_tableau.debugPrintIsBasic(e);
- //d_tableau.printBasicRow(e, cout);
+ Debug("arith::greedyConflictSubsets") << "basic error var: " << e << endl;
+ if(Debug.isOn("arith::greedyConflictSubsets")){
+ d_tableau.debugPrintIsBasic(e);
+ d_tableau.printBasicRow(e, Debug("arith::greedyConflictSubsets"));
+ }
}
// Phase 1: Try to find at least 1 pair for every element
tmp[0] = e;
tmp[1] = b;
if(trySet(tmp) == 2){
- //cout << "found a pair" << endl;
+ Debug("arith::greedyConflictSubsets") << "found a pair " << b << " " << e << endl;
hasParticipated.softAdd(b);
hasParticipated.softAdd(e);
+ Assert(debugIsASet(tmp));
subsets.push_back(tmp);
++(d_statistics.d_soiConflicts);
++(d_statistics.d_hasToBeMinimal);
possibleStarts.pop_back();
if(hasParticipated.isMember(v)){ continue; }
+ hasParticipated.add(v);
+
Assert(d_soiVar == ARITHVAR_SENTINEL);
//d_soiVar's row = \sumofinfeasibilites underConstruction
ArithVarVec underConstruction;
underConstruction.push_back(v);
d_soiVar = constructInfeasiblityFunction(d_statistics.d_soiConflictMinimization, v);
- //cout << "trying " << v << endl;
+ Debug("arith::greedyConflictSubsets") << "trying " << v << endl;
const Tableau::Entry* spoiler = NULL;
while( (spoiler = d_linEq.selectSlackEntry(d_soiVar, false)) != NULL){
int oppositeSgn = -(spoiler->getCoefficient().sgn());
Assert(oppositeSgn != 0);
- //cout << "looking for " << nb << " " << oppositeSgn << endl;
+ Debug("arith::greedyConflictSubsets") << "looking for " << nb << " " << oppositeSgn << endl;
ArithVar basicWithOp = find_basic_in_sgns(sgns, nb, oppositeSgn, hasParticipated, false);
if(basicWithOp == ARITHVAR_SENTINEL){
- //cout << "search did not work for " << nb << endl;
+ Debug("arith::greedyConflictSubsets") << "search did not work for " << nb << endl;
// greedy construction has failed
break;
}else{
- //cout << "found " << basicWithOp << endl;
+ Debug("arith::greedyConflictSubsets") << "found " << basicWithOp << endl;
addToInfeasFunc(d_statistics.d_soiConflictMinimization, d_soiVar, basicWithOp);
hasParticipated.softAdd(basicWithOp);
}
}
if(spoiler == NULL){
- //cout << "success" << endl;
+ Debug("arith::greedyConflictSubsets") << "success" << endl;
//then underConstruction contains a conflicting subset
+ Assert(debugIsASet(underConstruction));
subsets.push_back(underConstruction);
++d_statistics.d_soiConflicts;
if(underConstruction.size() == 3){
++d_statistics.d_maybeNotMinimal;
}
}else{
- //cout << "failure" << endl;
+ Debug("arith::greedyConflictSubsets") << "failure" << endl;
}
tearDownInfeasiblityFunction(d_statistics.d_soiConflictMinimization, d_soiVar);
d_soiVar = ARITHVAR_SENTINEL;
Assert(d_soiVar == ARITHVAR_SENTINEL);
+ Debug("arith::greedyConflictSubsets") << "greedyConflictSubsets done" << endl;
return subsets;
}
-void SumOfInfeasibilitiesSPD::generateSOIConflict(const ArithVarVec& subset){
+bool SumOfInfeasibilitiesSPD::generateSOIConflict(const ArithVarVec& subset){
Assert(d_soiVar == ARITHVAR_SENTINEL);
d_soiVar = constructInfeasiblityFunction(d_statistics.d_soiConflictMinimization, subset);
+ Assert(!subset.empty());
+ Assert(!d_conflictBuilder->underConstruction());
+
+ Debug("arith::generateSOIConflict") << "SumOfInfeasibilitiesSPD::generateSOIConflict(...) start" << endl;
- //NodeBuilder<> conflict(kind::AND);
+ bool success = false;
+
for(ArithVarVec::const_iterator iter = subset.begin(), end = subset.end(); iter != end; ++iter){
ArithVar e = *iter;
ConstraintP violated = d_errorSet.getViolated(e);
- //cout << "basic error var: " << violated << endl;
- d_conflictChannel.addConstraint(violated);
- //violated->explainForConflict(conflict);
+ Assert(violated != NullConstraint);
- //d_tableau.debugPrintIsBasic(e);
- //d_tableau.printBasicRow(e, cout);
- }
- for(Tableau::RowIterator i = d_tableau.basicRowIterator(d_soiVar); !i.atEnd(); ++i){
- const Tableau::Entry& entry = *i;
- ArithVar v = entry.getColVar();
- if(v == d_soiVar){ continue; }
- const Rational& coeff = entry.getCoefficient();
- ConstraintP c = (coeff.sgn() > 0) ?
- d_variables.getUpperBoundConstraint(v) :
- d_variables.getLowerBoundConstraint(v);
+ int sgn = d_errorSet.getSgn(e);
+ const Rational& violatedCoeff = sgn > 0 ? d_negOne : d_posOne;
+ Debug("arith::generateSOIConflict") << "basic error var: "
+ << "(" << violatedCoeff << ")"
+ << " " << violated
+ << endl;
- //cout << "nb : " << c << endl;
- d_conflictChannel.addConstraint(c);
+
+ d_conflictBuilder->addConstraint(violated, violatedCoeff);
+ Assert(violated->hasProof());
+ if(!success && !violated->negationHasProof()){
+ success = true;
+ d_conflictBuilder->makeLastConsequent();
+ }
+ }
+
+ if(!success){
+ // failure
+ d_conflictBuilder->reset();
+ } else {
+ // The consequent was picked arbitrarily above; any of the violated constraints may be selected for the conflict.
+ Assert(d_conflictBuilder->underConstruction());
+ Assert(d_conflictBuilder->consequentIsSet());
+
+ for(Tableau::RowIterator i = d_tableau.basicRowIterator(d_soiVar); !i.atEnd(); ++i){
+ const Tableau::Entry& entry = *i;
+ ArithVar v = entry.getColVar();
+ if(v == d_soiVar){ continue; }
+ const Rational& coeff = entry.getCoefficient();
+
+ ConstraintP c = (coeff.sgn() > 0) ?
+ d_variables.getUpperBoundConstraint(v) :
+ d_variables.getLowerBoundConstraint(v);
+
+ Debug("arith::generateSOIConflict") << "non-basic var: "
+ << "(" << coeff << ")"
+ << " " << c
+ << endl;
+ d_conflictBuilder->addConstraint(c, coeff);
+ }
+ ConstraintCP conflicted = d_conflictBuilder->commitConflict();
+ d_conflictChannel.raiseConflict(conflicted);
}
- //Node conf = conflict;
tearDownInfeasiblityFunction(d_statistics.d_soiConflictMinimization, d_soiVar);
d_soiVar = ARITHVAR_SENTINEL;
- d_conflictChannel.commitConflict();
- //return conf;
+ Debug("arith::generateSOIConflict") << "SumOfInfeasibilitiesSPD::generateSOIConflict(...) done" << endl;
+ Assert(d_soiVar == ARITHVAR_SENTINEL);
+ Assert(!d_conflictBuilder->underConstruction());
+ return success;
}
WitnessImprovement SumOfInfeasibilitiesSPD::SOIConflict(){
static int instance = 0;
instance++;
- //cout << "SOI conflict " << instance << ": |E| = " << d_errorSize << endl;
- //d_errorSet.debugPrint(cout);
- //cout << endl;
+
+ Debug("arith::SOIConflict") << "SumOfInfeasibilitiesSPD::SOIConflict() start "
+ << instance << ": |E| = " << d_errorSize << endl;
+ if(Debug.isOn("arith::SOIConflict")){
+ d_errorSet.debugPrint(cout);
+ }
+ Debug("arith::SOIConflict") << endl;
tearDownInfeasiblityFunction(d_statistics.d_soiConflictMinimization, d_soiVar);
d_soiVar = ARITHVAR_SENTINEL;
if(options::soiQuickExplain()){
quickExplain();
generateSOIConflict(d_qeConflict);
- //Node conflict = generateSOIConflict(d_qeConflict);
- //cout << conflict << endl;
- //d_conflictChannel(conflict);
}else{
-
vector<ArithVarVec> subsets = greedyConflictSubsets();
Assert( d_soiVar == ARITHVAR_SENTINEL);
-
+ bool anySuccess = false;
Assert(!subsets.empty());
for(vector<ArithVarVec>::const_iterator i = subsets.begin(), end = subsets.end(); i != end; ++i){
const ArithVarVec& subset = *i;
- generateSOIConflict(subset);
+ Assert(debugIsASet(subset));
+ anySuccess = generateSOIConflict(subset) || anySuccess;
//Node conflict = generateSOIConflict(subset);
//cout << conflict << endl;
//reportConflict(conf); do not do this. We need a custom explanations!
//d_conflictChannel(conflict);
}
+ Assert(anySuccess);
}
Assert( d_soiVar == ARITHVAR_SENTINEL);
d_soiVar = constructInfeasiblityFunction(d_statistics.d_soiConflictMinimization);
//reportConflict(conf); do not do this. We need a custom explanations!
d_conflictVariables.add(d_soiVar);
- //cout << "SOI conflict " << instance << "end" << endl;
+ Debug("arith::SOIConflict") << "SumOfInfeasibilitiesSPD::SOIConflict() done "
+ << instance << "end" << endl;
return ConflictFound;
}
}
bool SumOfInfeasibilitiesSPD::debugSOI(WitnessImprovement w, ostream& out, int instance) const{
-//#warning "Redo SOI"
return true;
// out << "DLV("<<instance<<") ";
// switch(w){
WitnessImprovement soiRound();
WitnessImprovement SOIConflict();
std::vector< ArithVarVec > greedyConflictSubsets();
- void generateSOIConflict(const ArithVarVec& subset);
+ bool generateSOIConflict(const ArithVarVec& subset);
// WitnessImprovement focusUsingSignDisagreements(ArithVar basic);
// WitnessImprovement focusDownToLastHalf();
cb.multiplyRow(rid, -a_rs_sgn);
}
-
-
void Tableau::addRow(ArithVar basic,
const std::vector<Rational>& coefficients,
const std::vector<ArithVar>& variables)
{
Assert(basic < getNumColumns());
-
+ Assert(debugIsASet(variables));
Assert(coefficients.size() == variables.size() );
Assert(!isBasic(basic));
d_containing(containing),
d_nlIncomplete( false),
d_rowTracking(),
- d_conflictBuffer(),
- d_constraintDatabase(c, u, d_partialModel, d_congruenceManager, RaiseConflict(*this, d_conflictBuffer)),
+ d_constraintDatabase(c, u, d_partialModel, d_congruenceManager, RaiseConflict(*this)),
d_qflraStatus(Result::SAT_UNKNOWN),
d_unknownsInARow(0),
d_hasDoneWorkSinceCut(false),
d_tableauResetDensity(1.6),
d_tableauResetPeriod(10),
d_conflicts(c),
-
d_blackBoxConflict(c, Node::null()),
- d_congruenceManager(c, d_constraintDatabase, SetupLiteralCallBack(*this), d_partialModel, RaiseConflict(*this, d_conflictBuffer)),
+ d_congruenceManager(c, d_constraintDatabase, SetupLiteralCallBack(*this), d_partialModel, RaiseEqualityEngineConflict(*this)),
d_cmEnabled(c, true),
- d_dualSimplex(d_linEq, d_errorSet, RaiseConflict(*this, d_conflictBuffer), TempVarMalloc(*this)),
- d_fcSimplex(d_linEq, d_errorSet, RaiseConflict(*this, d_conflictBuffer), TempVarMalloc(*this)),
- d_soiSimplex(d_linEq, d_errorSet, RaiseConflict(*this, d_conflictBuffer), TempVarMalloc(*this)),
- d_attemptSolSimplex(d_linEq, d_errorSet, RaiseConflict(*this, d_conflictBuffer), TempVarMalloc(*this)),
+ d_dualSimplex(d_linEq, d_errorSet, RaiseConflict(*this), TempVarMalloc(*this)),
+ d_fcSimplex(d_linEq, d_errorSet, RaiseConflict(*this), TempVarMalloc(*this)),
+ d_soiSimplex(d_linEq, d_errorSet, RaiseConflict(*this), TempVarMalloc(*this)),
+ d_attemptSolSimplex(d_linEq, d_errorSet, RaiseConflict(*this), TempVarMalloc(*this)),
d_pass1SDP(NULL),
d_otherSDP(NULL),
return true;
}
+void TheoryArithPrivate::raiseConflict(ConstraintCP a){
+ Assert(a->inConflict());
+ d_conflicts.push_back(a);
+}
+
+void TheoryArithPrivate::raiseBlackBoxConflict(Node bb){
+ if(d_blackBoxConflict.get().isNull()){
+ d_blackBoxConflict = bb;
+ }
+}
+
void TheoryArithPrivate::revertOutOfConflict(){
d_partialModel.revertAssignmentChanges();
clearUpdates();
d_updatedBounds.purge();
}
-void TheoryArithPrivate::raiseConflict(ConstraintCP a, ConstraintCP b){
- ConstraintCPVec v;
- v.push_back(a);
- v.push_back(b);
- d_conflicts.push_back(v);
-}
+// void TheoryArithPrivate::raiseConflict(ConstraintCP a, ConstraintCP b){
+// ConstraintCPVec v;
+// v.push_back(a);
+// v.push_back(b);
+// d_conflicts.push_back(v);
+// }
-void TheoryArithPrivate::raiseConflict(ConstraintCP a, ConstraintCP b, ConstraintCP c){
- ConstraintCPVec v;
- v.push_back(a);
- v.push_back(b);
- v.push_back(c);
- d_conflicts.push_back(v);
-}
+// void TheoryArithPrivate::raiseConflict(ConstraintCP a, ConstraintCP b, ConstraintCP c){
+// ConstraintCPVec v;
+// v.push_back(a);
+// v.push_back(b);
+// v.push_back(c);
+// d_conflicts.push_back(v);
+// }
void TheoryArithPrivate::zeroDifferenceDetected(ArithVar x){
if(d_cmEnabled){
bool TheoryArithPrivate::AssertLower(ConstraintP constraint){
Assert(constraint != NullConstraint);
Assert(constraint->isLowerBound());
+ Assert(constraint->isTrue());
+ Assert(!constraint->negationHasProof());
+
ArithVar x_i = constraint->getVariable();
const DeltaRational& c_i = constraint->getValue();
int cmpToUB = d_partialModel.cmpToUpperBound(x_i, c_i);
if(cmpToUB > 0){ // c_i < \lowerbound(x_i)
ConstraintP ubc = d_partialModel.getUpperBoundConstraint(x_i);
- raiseConflict(ubc, constraint);
+ ConstraintP negation = constraint->getNegation();
+ negation->impliedByUnate(ubc, true);
+
+ raiseConflict(constraint);
- // Node conflict = ConstraintValue::explainConflict(ubc, constraint);
- // Debug("arith") << "AssertLower conflict " << conflict << endl;
- // raiseConflict(conflict);
++(d_statistics.d_statAssertLowerConflicts);
return true;
}else if(cmpToUB == 0){
if(isInteger(x_i)){
d_constantIntegerVariables.push_back(x_i);
- Debug("dio::push") << x_i << endl;
+ Debug("dio::push") << "dio::push " << x_i << endl;
}
ConstraintP ub = d_partialModel.getUpperBoundConstraint(x_i);
}
const ValueCollection& vc = constraint->getValueCollection();
- if(vc.hasDisequality()){
- Assert(vc.hasEquality());
+ if(vc.hasEquality()){
+
+ Assert(vc.hasDisequality());
ConstraintP eq = vc.getEquality();
ConstraintP diseq = vc.getDisequality();
- if(diseq->isTrue()){
- //const ConstraintP ub = vc.getUpperBound();
- raiseConflict(diseq, ub, constraint);
- //Node conflict = ConstraintValue::explainConflict(diseq, ub, constraint);
-
+ // x <= b, x >= b |= x = b
+ // (x > b or x < b or x = b)
+ Debug("arith::eq") << "lb == ub, propagate eq" << eq << endl;
+ bool triConflict = diseq->isTrue();
+
+ if(!eq->isTrue()){
+ eq->impliedByTrichotomy(constraint, ub, triConflict);
+ eq->tryToPropagate();
+ }
+ if(triConflict){
++(d_statistics.d_statDisequalityConflicts);
- //Debug("eq") << " assert lower conflict " << conflict << endl;
- //raiseConflict(conflict);
+ raiseConflict(eq);
return true;
- }else if(!eq->isTrue()){
- Debug("eq") << "lb == ub, propagate eq" << eq << endl;
- eq->impliedBy(constraint, d_partialModel.getUpperBoundConstraint(x_i));
- // do not need to add to d_learnedBounds
}
}
}else{
+ // l <= x <= u and l < u
Assert(cmpToUB < 0);
const ValueCollection& vc = constraint->getValueCollection();
const ConstraintP diseq = vc.getDisequality();
if(diseq->isTrue()){
const ConstraintP ub = d_constraintDatabase.ensureConstraint(const_cast<ValueCollection&>(vc), UpperBound);
-
- if(ub->hasProof()){
- raiseConflict(diseq, ub, constraint);
+ ConstraintP negUb = ub->getNegation();
+
+ // l <= x, l != x |= l < x
+ // |= not (l >= x)
+ bool ubInConflict = ub->hasProof();
+ bool learnNegUb = !(negUb->hasProof());
+ if(learnNegUb){
+ negUb->impliedByTrichotomy(constraint, diseq, ubInConflict);
+ negUb->tryToPropagate();
+ }
+ if(ubInConflict){
+ raiseConflict(ub);
return true;
- // Node conflict = ConstraintValue::explainConflict(diseq, ub, constraint);
- // Debug("eq") << " assert upper conflict " << conflict << endl;
- // raiseConflict(conflict);
- // return true;
- }else if(!ub->negationHasProof()){
- ConstraintP negUb = ub->getNegation();
- negUb->impliedBy(constraint, diseq);
+ }else if(learnNegUb){
d_learnedBounds.push_back(negUb);
}
}
/* procedure AssertUpper( x_i <= c_i) */
bool TheoryArithPrivate::AssertUpper(ConstraintP constraint){
+ Assert(constraint != NullConstraint);
+ Assert(constraint->isUpperBound());
+ Assert(constraint->isTrue());
+ Assert(!constraint->negationHasProof());
+
ArithVar x_i = constraint->getVariable();
const DeltaRational& c_i = constraint->getValue();
Debug("arith") << "AssertUpper(" << x_i << " " << c_i << ")"<< std::endl;
- AssertArgument(constraint != NullConstraint,
- "AssertUpper() called on a NullConstraint.");
- Assert(constraint->isUpperBound());
+
//Too strong because of rounding with integers
//Assert(!constraint->hasLiteral() || original == constraint->getLiteral());
if(d_partialModel.greaterThanUpperBound(x_i, c_i) ){ // \upperbound(x_i) <= c_i
return false; //sat
}
-
+
// cmpToLb = \lowerbound(x_i).cmp(c_i)
int cmpToLB = d_partialModel.cmpToLowerBound(x_i, c_i);
if( cmpToLB < 0 ){ // \upperbound(x_i) < \lowerbound(x_i)
+ // l_i <= x_i and c_i < l_i |= c_i < x_i
+ // or ... |= not (x_i <= c_i)
ConstraintP lbc = d_partialModel.getLowerBoundConstraint(x_i);
- raiseConflict(lbc, constraint);
- //Node conflict = ConstraintValue::explainConflict(lbc, constraint);
- //Debug("arith") << "AssertUpper conflict " << conflict << endl;
+ ConstraintP negConstraint = constraint->getNegation();
+ negConstraint->impliedByUnate(lbc, true);
+ raiseConflict(constraint);
++(d_statistics.d_statAssertUpperConflicts);
- //raiseConflict(conflict);
return true;
}else if(cmpToLB == 0){ // \lowerBound(x_i) == \upperbound(x_i)
if(isInteger(x_i)){
d_constantIntegerVariables.push_back(x_i);
- Debug("dio::push") << x_i << endl;
+ Debug("dio::push") << "dio::push " << x_i << endl;
}
+
+ const ValueCollection& vc = constraint->getValueCollection();
ConstraintP lb = d_partialModel.getLowerBoundConstraint(x_i);
if(d_cmEnabled){
if(!d_congruenceManager.isWatchedVariable(x_i) || c_i.sgn() != 0){
}
}
- const ValueCollection& vc = constraint->getValueCollection();
if(vc.hasDisequality()){
- Assert(vc.hasEquality());
- const ConstraintP diseq = vc.getDisequality();
- const ConstraintP eq = vc.getEquality();
- if(diseq->isTrue()){
- raiseConflict(diseq, lb, constraint);
- //Node conflict = ConstraintValue::explainConflict(diseq, lb, constraint);
- //Debug("eq") << " assert upper conflict " << conflict << endl;
- //raiseConflict(conflict);
- return true;
- }else if(!eq->isTrue()){
- Debug("eq") << "lb == ub, propagate eq" << eq << endl;
- eq->impliedBy(constraint, d_partialModel.getLowerBoundConstraint(x_i));
- //do not bother to add to d_learnedBounds
+ Assert(vc.hasEquality());
+ ConstraintP eq = vc.getEquality();
+ ConstraintP diseq = vc.getDisequality();
+ // x <= b, x >= b |= x = b
+ // (x > b or x < b or x = b)
+ Debug("arith::eq") << "lb == ub, propagate eq" << eq << endl;
+ bool triConflict = diseq->isTrue();
+ if(!eq->isTrue()){
+ eq->impliedByTrichotomy(constraint, lb, triConflict);
+ eq->tryToPropagate();
}
+ if(triConflict){
+ ++(d_statistics.d_statDisequalityConflicts);
+ raiseConflict(eq);
+ return true;
+ }
}
}else if(cmpToLB > 0){
+ // l <= x <= u and l < u
+ Assert(cmpToLB > 0);
const ValueCollection& vc = constraint->getValueCollection();
+
if(vc.hasDisequality()){
const ConstraintP diseq = vc.getDisequality();
if(diseq->isTrue()){
- const ConstraintP lb =
- d_constraintDatabase.ensureConstraint(const_cast<ValueCollection&>(vc), LowerBound);
- if(lb->hasProof()){
- raiseConflict(diseq, lb, constraint);
- //Node conflict = ConstraintValue::explainConflict(diseq, lb, constraint);
- //Debug("eq") << " assert upper conflict " << conflict << endl;
- //raiseConflict(conflict);
+ const ConstraintP lb = d_constraintDatabase.ensureConstraint(const_cast<ValueCollection&>(vc), LowerBound);
+ ConstraintP negLb = lb->getNegation();
+
+ // x <= u, x != u |= x < u
+ // |= not (x >= u)
+ bool lbInConflict = lb->hasProof();
+ bool learnNegLb = !(negLb->hasProof());
+ if(learnNegLb){
+ negLb->impliedByTrichotomy(constraint, diseq, lbInConflict);
+ negLb->tryToPropagate();
+ }
+ if(lbInConflict){
+ raiseConflict(lb);
return true;
- }else if(!lb->negationHasProof()){
- ConstraintP negLb = lb->getNegation();
- negLb->impliedBy(constraint, diseq);
+ }else if(learnNegLb){
d_learnedBounds.push_back(negLb);
}
}
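When the newly asserted bound contradicts a single existing bound, the proof goes to the negation of the asserted constraint via impliedByUnate, and the conflict is then raised on the asserted constraint itself, since raiseConflict expects both a constraint and its negation to carry proofs. Condensed (sketch only; constraint and existing are placeholders):

  ConstraintP neg = constraint->getNegation();
  neg->impliedByUnate(existing, true); // ~constraint follows from the one existing bound
  raiseConflict(constraint);           // constraint is an assumption; ~constraint is now proven
  return true;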
/* procedure AssertEquality( x_i == c_i ) */
bool TheoryArithPrivate::AssertEquality(ConstraintP constraint){
- AssertArgument(constraint != NullConstraint,
- "AssertUpper() called on a NullConstraint.");
+ Assert(constraint != NullConstraint);
+ Assert(constraint->isEquality());
+ Assert(constraint->isTrue());
+ Assert(!constraint->negationHasProof());
ArithVar x_i = constraint->getVariable();
const DeltaRational& c_i = constraint->getValue();
return false; //sat
}
- if(cmpToUB > 0){
- ConstraintP ubc = d_partialModel.getUpperBoundConstraint(x_i);
- raiseConflict(ubc, constraint);
- //Node conflict = ConstraintValue::explainConflict(ubc, constraint);
- //Debug("arith") << "AssertEquality conflicts with upper bound " << conflict << endl;
- //raiseConflict(conflict);
- return true;
- }
-
- if(cmpToLB < 0){
- ConstraintP lbc = d_partialModel.getLowerBoundConstraint(x_i);
- raiseConflict(lbc, constraint);
-
- // Node conflict = ConstraintValue::explainConflict(lbc, constraint);
- // Debug("arith") << "AssertEquality conflicts with lower bound" << conflict << endl;
- // raiseConflict(conflict);
+ if(cmpToUB > 0 || cmpToLB < 0){
+ ConstraintP cb = (cmpToUB > 0) ? d_partialModel.getUpperBoundConstraint(x_i) :
+ d_partialModel.getLowerBoundConstraint(x_i);
+ ConstraintP diseq = constraint->getNegation();
+ Assert(!diseq->isTrue());
+ diseq->impliedByUnate(cb, true);
+ raiseConflict(constraint);
return true;
}
if(isInteger(x_i)){
d_constantIntegerVariables.push_back(x_i);
- Debug("dio::push") << x_i << endl;
+ Debug("dio::push") << "dio::push " << x_i << endl;
}
// Don't bother to check whether x_i != c_i is in d_diseq
/* procedure AssertDisequality( x_i != c_i ) */
bool TheoryArithPrivate::AssertDisequality(ConstraintP constraint){
- AssertArgument(constraint != NullConstraint,
- "AssertUpper() called on a NullConstraint.");
+ Assert(constraint != NullConstraint);
+ Assert(constraint->isDisequality());
+ Assert(constraint->isTrue());
+ Assert(!constraint->negationHasProof());
ArithVar x_i = constraint->getVariable();
const DeltaRational& c_i = constraint->getValue();
-
Debug("arith") << "AssertDisequality(" << x_i << " " << c_i << ")"<< std::endl;
//Should be fine in integers
const ConstraintP lb = vc.getLowerBound();
const ConstraintP ub = vc.getUpperBound();
if(lb->isTrue() && ub->isTrue()){
+ ConstraintP eq = constraint->getNegation();
+ eq->impliedByTrichotomy(lb, ub, true);
+ raiseConflict(constraint);
//in conflict
- Debug("eq") << "explaining" << endl;
++(d_statistics.d_statDisequalityConflicts);
- raiseConflict(constraint, lb, ub);
- //Node conflict = ConstraintValue::explainConflict(constraint, lb, ub);
- //raiseConflict(conflict);
return true;
}
}
const ConstraintP lb = vc.getLowerBound();
if(lb->isTrue()){
const ConstraintP ub = d_constraintDatabase.ensureConstraint(const_cast<ValueCollection&>(vc), UpperBound);
- Debug("eq") << "propagate UpperBound " << constraint << lb << ub << endl;
+ Assert(!ub->isTrue());
+ Debug("arith::eq") << "propagate UpperBound " << constraint << lb << ub << endl;
const ConstraintP negUb = ub->getNegation();
if(!negUb->isTrue()){
- negUb->impliedBy(constraint, lb);
+ negUb->impliedByTrichotomy(constraint, lb, false);
+ negUb->tryToPropagate();
d_learnedBounds.push_back(negUb);
}
}
const ConstraintP ub = vc.getUpperBound();
if(ub->isTrue()){
const ConstraintP lb = d_constraintDatabase.ensureConstraint(const_cast<ValueCollection&>(vc), LowerBound);
- Debug("eq") << "propagate LowerBound " << constraint << lb << ub << endl;
+ Assert(!lb->isTrue());
+ Debug("arith::eq") << "propagate LowerBound " << constraint << lb << ub << endl;
const ConstraintP negLb = lb->getNegation();
if(!negLb->isTrue()){
- negLb->impliedBy(constraint, ub);
+ negLb->impliedByTrichotomy(constraint, ub, false);
+ negLb->tryToPropagate();
d_learnedBounds.push_back(negLb);
}
}
bool split = constraint->isSplit();
if(!split && c_i == d_partialModel.getAssignment(x_i)){
- Debug("eq") << "lemma now! " << constraint << endl;
+ Debug("arith::eq") << "lemma now! " << constraint << endl;
outputLemma(constraint->split());
return false;
}else if(d_partialModel.strictlyLessThanLowerBound(x_i, c_i)){
- Debug("eq") << "can drop as less than lb" << constraint << endl;
+ Debug("arith::eq") << "can drop as less than lb" << constraint << endl;
}else if(d_partialModel.strictlyGreaterThanUpperBound(x_i, c_i)){
- Debug("eq") << "can drop as less than ub" << constraint << endl;
+ Debug("arith::eq") << "can drop as less than ub" << constraint << endl;
}else if(!split){
- Debug("eq") << "push back" << constraint << endl;
+ Debug("arith::eq") << "push back" << constraint << endl;
d_diseqQueue.push(constraint);
d_partialModel.invalidateDelta();
}else{
- Debug("eq") << "skipping already split " << constraint << endl;
+ Debug("arith::eq") << "skipping already split " << constraint << endl;
}
return false;
}
const DeltaRational& beta = d_partialModel.getAssignment(v);
Assert(beta.isIntegral());
- Polynomial betaAsPolynomial( Constant::mkConstant(beta.floor()) );
+ Polynomial betaAsPolynomial = Polynomial::mkPolynomial( Constant::mkConstant(beta.floor()) );
TNode var = d_partialModel.asNode(v);
Polynomial varAsPolynomial = Polynomial::parsePolynomial(var);
// If the bounds are equal this is already in the dioSolver
//Add v = dr as a speculation.
Comparison eq = mkIntegerEqualityFromAssignment(v);
- Debug("dio::push") <<v << " " << eq.getNode() << endl;
+ Debug("dio::push") << "dio::push " << v << " " << eq.getNode() << endl;
Assert(!eq.isBoolean());
d_diosolver.pushInputConstraint(eq, eq.getNode());
// It does not matter what the explanation of eq is.
return Node::null();
}else{
Polynomial p = plane.getPolynomial();
- Polynomial c(plane.getConstant() * Constant::mkConstant(-1));
+ Polynomial c = Polynomial::mkPolynomial(plane.getConstant() * Constant::mkConstant(-1));
Integer gcd = p.gcd();
Assert(p.isIntegral());
Assert(c.isIntegral());
ArithVar v = d_constantIntegerVariables.front();
d_constantIntegerVariables.pop();
- Debug("arith::dio") << v << endl;
+ Debug("arith::dio") << "callDioSolver " << v << endl;
Assert(isInteger(v));
Assert(d_partialModel.boundsAreEqual(v));
}else if(ub->isEquality()){
orig = ub->externalExplainByAssertions();
}else {
- orig = Constraint_::externalExplainByAssertions(ub, lb);
+ orig = Constraint::externalExplainByAssertions(ub, lb);
}
Assert(d_partialModel.assignmentIsConsistent(v));
Assert(orig.getKind() != EQUAL);
return orig;
}else{
- Debug("dio::push") << v << " " << eq.getNode() << " with reason " << orig << endl;
+ Debug("dio::push") << "dio::push " << v << " " << eq.getNode() << " with reason " << orig << endl;
d_diosolver.pushInputConstraint(eq, orig);
}
}
// if is (not true), or false
Assert((reEq.getConst<bool>() && isDistinct) ||
(!reEq.getConst<bool>() && !isDistinct));
- blackBoxConflict(assertion);
+ raiseBlackBoxConflict(assertion);
}
return NullConstraint;
}
Assert(constraint != NullConstraint);
- if(constraint->negationHasProof()){
+ if(constraint->assertedToTheTheory()){
+ //Do nothing
+ return NullConstraint;
+ }
+ Assert(!constraint->assertedToTheTheory());
+ bool inConflict = constraint->negationHasProof();
+ constraint->setAssertedToTheTheory(assertion, inConflict);
+
+ if(!constraint->hasProof()){
+ Debug("arith::constraint") << "marking as constraint as self explaining " << endl;
+ constraint->setAssumption(inConflict);
+ } else {
+ Debug("arith::constraint") << "already has proof: " << constraint->externalExplainByAssertions() << endl;
+ }
+
+
+ if(Debug.isOn("arith::negatedassumption") && inConflict){
ConstraintP negation = constraint->getNegation();
- if(negation->isSelfExplaining()){
- if(Debug.isOn("whytheoryenginewhy")){
- debugPrintFacts();
- }
+ if(Debug.isOn("arith::negatedassumption") && negation->isAssumption()){
+ debugPrintFacts();
}
+ Debug("arith::eq") << "negation has proof" << endl;
Debug("arith::eq") << constraint << endl;
Debug("arith::eq") << negation << endl;
-
- constraint->setAssertedToTheTheoryWithNegationTrue(assertion);
- if(!constraint->hasProof()){
- Debug("arith::constraint") << "marking as constraint as self explaining " << endl;
- constraint->selfExplainingWithNegationTrue();
- }else{
- Debug("arith::constraint") << "already has proof: " << constraint->externalExplainByAssertions() << endl;
- }
-
- raiseConflict(constraint, negation);
- // NodeBuilder<> nb(kind::AND);
- // nb << assertion;
- // negation->explainForConflict(nb);
- // Node conflict = nb;
- // Debug("arith::eq") << "conflict" << conflict << endl;
- // raiseConflict(conflict);
- return NullConstraint;
}
- Assert(!constraint->negationHasProof());
- if(constraint->assertedToTheTheory()){
- //Do nothing
+ if(inConflict){
+ ConstraintP negation = constraint->getNegation();
+ if(Debug.isOn("arith::negatedassumption") && negation->isAssumption()){
+ debugPrintFacts();
+ }
+ Debug("arith::eq") << "negation has proof" << endl;
+ Debug("arith::eq") << constraint << endl;
+ Debug("arith::eq") << negation << endl;
+ raiseConflict(negation);
return NullConstraint;
}else{
- Debug("arith::constraint") << "arith constraint " << constraint << std::endl;
- constraint->setAssertedToTheTheory(assertion);
-
- if(!constraint->hasProof()){
- Debug("arith::constraint") << "marking as constraint as self explaining " << endl;
- constraint->selfExplaining();
- }else{
- Debug("arith::constraint") << "already has proof: " << constraint->externalExplainByAssertions() << endl;
- }
-
return constraint;
}
}
if(isInteger(x_i) && constraint->isStrictUpperBound()){
ConstraintP floorConstraint = constraint->getFloor();
if(!floorConstraint->isTrue()){
- if(floorConstraint->negationHasProof()){
- raiseConflict(constraint, floorConstraint->getNegation());
- //Node conf = Constraint_::explainConflict(constraint, floorConstraint->getNegation());
- //raiseConflict(conf);
+ bool inConflict = floorConstraint->negationHasProof();
+ floorConstraint->impliedByIntHole(constraint, inConflict);
+ floorConstraint->tryToPropagate();
+ if(inConflict){
+ raiseConflict(floorConstraint);
return true;
- }else{
- floorConstraint->impliedBy(constraint);
- // Do not need to add to d_learnedBounds
}
}
return AssertUpper(floorConstraint);
if(isInteger(x_i) && constraint->isStrictLowerBound()){
ConstraintP ceilingConstraint = constraint->getCeiling();
if(!ceilingConstraint->isTrue()){
- if(ceilingConstraint->negationHasProof()){
- raiseConflict(constraint, ceilingConstraint->getNegation());
- //Node conf = Constraint_::explainConflict(constraint, ceilingConstraint->getNegation());
- //raiseConflict(conf);
+ bool inConflict = ceilingConstraint->negationHasProof();
+ ceilingConstraint->impliedByIntHole(constraint, inConflict);
+ ceilingConstraint->tryToPropagate();
+ if(inConflict){
+ raiseConflict(ceilingConstraint);
return true;
}
- ceilingConstraint->impliedBy(constraint);
- // Do not need to add to learnedBounds
}
return AssertLower(ceilingConstraint);
}else{
}
}
+
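+/** Flattens nested OR/AND/PLUS/MULT applications and sorts the children; used below for debug-only normalization of conflicts. */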
+Node flattenAndSort(Node n){
+ Kind k = n.getKind();
+ switch(k){
+ case kind::OR:
+ case kind::AND:
+ case kind::PLUS:
+ case kind::MULT:
+ break;
+ default:
+ return n;
+ }
+
+ std::vector<Node> out;
+ std::vector<Node> process;
+ process.push_back(n);
+ while(!process.empty()){
+ Node b = process.back();
+ process.pop_back();
+ if(b.getKind() == k){
+ for(Node::iterator i=b.begin(), end=b.end(); i!=end; ++i){
+ process.push_back(*i);
+ }
+ } else {
+ out.push_back(b);
+ }
+ }
+ Assert(out.size() >= 2);
+ std::sort(out.begin(), out.end());
+ return NodeManager::currentNM()->mkNode(k, out);
+}
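For example, under the new arith::normalize::external debug flag this function makes conflicts comparable across runs. Schematically (A, B and C stand for arbitrary Nodes):

  // flattenAndSort( (or A (or B C)) )  ->  (or A B C), with the children sorted
  // flattenAndSort( (f A B) )          ->  (f A B), unchanged for non-listed kinds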
+
+
+
/** Outputs conflicts to the output channel. */
void TheoryArithPrivate::outputConflicts(){
Assert(anyConflict());
+ static unsigned int conflicts = 0;
+
if(!conflictQueueEmpty()){
Assert(!d_conflicts.empty());
for(size_t i = 0, i_end = d_conflicts.size(); i < i_end; ++i){
- const ConstraintCPVec& vec = d_conflicts[i];
- Node conflict = Constraint_::externalExplainByAssertions(vec);
- Debug("arith::conflict") << "d_conflicts[" << i << "] " << conflict << endl;
+ ConstraintCP confConstraint = d_conflicts[i];
+ Assert(confConstraint->inConflict());
+ Node conflict = confConstraint->externalExplainConflict();
+
+ ++conflicts;
+ Debug("arith::conflict") << "d_conflicts[" << i << "] " << conflict
+ // << "("<<conflicts<<")"
+ << endl;
+ if(Debug.isOn("arith::normalize::external")){
+ conflict = flattenAndSort(conflict);
+ Debug("arith::conflict") << "(normalized to) " << conflict << endl;
+ }
+
(d_containing.d_out)->conflict(conflict);
}
}
if(!d_blackBoxConflict.get().isNull()){
Node bb = d_blackBoxConflict.get();
- Debug("arith::conflict") << "black box conflict" << bb << endl;
+ ++conflicts;
+ Debug("arith::conflict") << "black box conflict" << bb
+ //<< "("<<conflicts<<")"
+ << endl;
+ if(Debug.isOn("arith::normalize::external")){
+ bb = flattenAndSort(bb);
+ Debug("arith::conflict") << "(normalized to) " << bb << endl;
+ }
+
(d_containing.d_out)->conflict(bb);
}
}
turnOffApproxFor(options::replayNumericFailurePenalty());
}
- for(size_t i =0, N = res.size(); i < N; ++i){
- raiseConflict(res[i]);
- }
+
+
if(res.empty()){
++d_statistics.d_replayAttemptFailed;
}else{
- ++d_statistics.d_mipProofsSuccessful;
+ unsigned successes = 0;
+ for(size_t i =0, N = res.size(); i < N; ++i){
+ ConstraintCPVec& vec = res[i];
+ Assert(vec.size() >= 2);
+ for(size_t j=0, M = vec.size(); j < M; ++j){
+ ConstraintCP at_j = vec[j];
+ Assert(at_j->isTrue());
+ if(!at_j->negationHasProof()){
+ successes++;
+ vec[j] = vec.back();
+ vec.pop_back();
+ ConstraintP neg_at_j = at_j->getNegation();
+
+ Debug("approx::replayLog") << "Setting the proof for the replayLog conflict on:" << endl
+ << " (" << neg_at_j->isTrue() <<") " << neg_at_j << endl
+ << " (" << at_j->isTrue() <<") " << at_j << endl;
+ neg_at_j->impliedByIntHole(vec, true);
+ raiseConflict(at_j);
+ break;
+ }
+ }
+ }
+ if(successes > 0){
+ ++d_statistics.d_mipProofsSuccessful;
+ }
}
if(d_currentPropagationList.size() > enteringPropN){
return safeConstructNary(nb);
}
+ConstraintCP TheoryArithPrivate::vectorToIntHoleConflict(const ConstraintCPVec& conflict){
+ Assert(conflict.size() >= 2);
+ ConstraintCPVec exp(conflict.begin(), conflict.end()-1);
+ ConstraintCP back = conflict.back();
+ ConstraintP negBack = back->getNegation();
+ negBack->impliedByIntHole(exp, true);
+ return back;
+}
+
+void TheoryArithPrivate::intHoleConflictToVector(ConstraintCP conflicting, ConstraintCPVec& conflict){
+ ConstraintCP negConflicting = conflicting->getNegation();
+ Assert(conflicting->hasProof());
+ Assert(negConflicting->hasProof());
+
+ conflict.push_back(conflicting);
+ conflict.push_back(negConflicting);
+
+ Constraint::assertionFringe(conflict);
+}
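Conflicts are now queued as a single ConstraintCP whose negation also carries a proof, while the cut-replay code still works with explicit ConstraintCPVec conflicts; the two helpers above translate between the representations. Roughly (sketch only, inside TheoryArithPrivate):

  ConstraintCPVec vec;
  intHoleConflictToVector(conflicting, vec);          // {conflicting, ~conflicting}, closed under assertionFringe
  ConstraintCP raised = vectorToIntHoleConflict(vec); // proves the negation of vec.back() by IntHole
  raiseConflict(raised);                              // vec.back() and its negation both have proofs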
void TheoryArithPrivate::tryBranchCut(ApproximateSimplex* approx, int nid, BranchCutInfo& bci){
Assert(conflictQueueEmpty());
d_partialModel.startQueueingBoundCounts();
}
for(size_t i = 0, N = d_conflicts.size(); i < N; ++i){
- conflicts.push_back(d_conflicts[i]);
- // remove the floor/ceiling contraint implied by bcneg
- Constraint_::assertionFringe(conflicts.back());
+
+ conflicts.push_back(ConstraintCPVec());
+ intHoleConflictToVector(d_conflicts[i], conflicts.back());
+ Constraint::assertionFringe(conflicts.back());
+
+ // ConstraintCP conflicting = d_conflicts[i];
+ // ConstraintCP negConflicting = conflicting->getNegation();
+ // Assert(conflicting->hasProof());
+ // Assert(negConflicting->hasProof());
+
+ // conflicts.push_back(ConstraintCPVec());
+ // ConstraintCPVec& back = conflicts.back();
+ // back.push_back(conflicting);
+ // back.push_back(negConflicting);
+
+ // // remove the floor/ceiling constraint implied by bcneg
+ // Constraint::assertionFringe(back);
}
if(Debug.isOn("approx::branch")){
// make sure to be working on the assertion fringe!
if(!contains(conf, bcneg)){
Debug("approx::branch") << "reraise " << conf << endl;
- raiseConflict(conf);
+ ConstraintCP conflicting = vectorToIntHoleConflict(conf);
+ raiseConflict(conflicting);
}else if(!bci.proven()){
drop(conf, bcneg);
bci.setExplanation(conf);
void TheoryArithPrivate::replayAssert(ConstraintP c) {
if(!c->assertedToTheTheory()){
- if(c->negationHasProof()){
- ConstraintP neg = c->getNegation();
- raiseConflict(c, neg);
- Debug("approx::replayAssert") << "replayAssertion conflict " << neg << " : " << c << endl;
- }else if(!c->hasProof()){
- c->setInternalDecision();
- assertionCases(c);
+ bool inConflict = c->negationHasProof();
+ if(!c->hasProof()){
+ c->setInternalAssumption(inConflict);
Debug("approx::replayAssert") << "replayAssert " << c << " set internal" << endl;
}else{
- assertionCases(c);
Debug("approx::replayAssert") << "replayAssert " << c << " has explanation" << endl;
}
+ Debug("approx::replayAssert") << "replayAssertion " << c << endl;
+ if(inConflict){
+ raiseConflict(c);
+ }else{
+ assertionCases(c);
+ }
}else{
- Debug("approx::replayAssert") << "replayAssert " << c << " already asserted" << endl;
+ Debug("approx::replayAssert") << "replayAssert " << c << " already asserted" << endl;
}
}
-// ConstraintCPVec TheoryArithPrivate::toExplanation(Node n) const {
-// ConstraintCPVec res;
-// cout << "toExplanation" << endl;
-// if(n.getKind() == kind::AND){
-// for(unsigned i = 0; i < n.getNumChildren(); ++i){
-// ConstraintP c = d_constraintDatabase.lookup(n[i]);
-// if(c == NullConstraint){ return std::vector<Constraint>(); }
-// res.push_back(c);
-// cout << "\t"<<c << endl;
-// }
-// }else{
-// ConstraintP c = d_constraintDatabase.lookup(n);
-// if(c == NullConstraint){ return std::vector<Constraint>(); }
-// res.push_back(c);
-// }
-// return res;
-// }
-
-// void TheoryArithPrivate::enqueueConstraints(std::vector<Constraint>& out, Node n) const{
-// if(n.getKind() == kind::AND){
-// for(unsigned i = 0, N = n.getNumChildren(); i < N; ++i){
-// enqueueConstraints(out, n[i]);
-// }
-// }else{
-// ConstraintP c = d_constraintDatabase.lookup(n);
-// if(c == NullConstraint){
-// cout << "failing on " << n << endl;
-// }
-// Assert(c != NullConstraint);
-// out.push_back(c);
-// }
-// }
-
-// ConstraintCPVec TheoryArithPrivate::resolveOutPropagated(const ConstraintCPVec& v, const std::set<ConstraintCP>& propagated) const {
-// cout << "resolveOutPropagated()" << conf << endl;
-// std::set<ConstraintCP> final;
-// std::set<ConstraintCP> processed;
-// std::vector<ConstraintCP> to_process;
-// enqueueConstraints(to_process, conf);
-// while(!to_process.empty()){
-// ConstraintP c = to_process.back(); to_process.pop_back();
-// if(processed.find(c) != processed.end()){
-// continue;
-// }else{
-// if(propagated.find(c) == propagated.end()){
-// final.insert(c);
-// }else{
-// Node exp = c->explainForPropagation();
-// enqueueConstraints(to_process, exp);
-// }
-// processed.insert(c);
-// }
-// }
-// cout << "final size: " << final.size() << std::endl;
-// NodeBuilder<> nb(kind::AND);
-// std::set<Constraint>::const_iterator iter = final.begin(), end = final.end();
-// for(; iter != end; ++iter){
-// ConstraintP c = *iter;
-// c->explainForConflict(nb);
-// }
-// Node newConf = safeConstructNary(nb);
-// cout << "resolveOutPropagated("<<conf<<", ...) ->" << newConf << endl;
-// return newConf;
-// }
void TheoryArithPrivate::resolveOutPropagated(std::vector<ConstraintCPVec>& confs, const std::set<ConstraintCP>& propagated) const {
Debug("arith::resolveOutPropagated")
for(size_t i =0, N = confs.size(); i < N; ++i){
ConstraintCPVec& conf = confs[i];
size_t orig = conf.size();
- Constraint_::assertionFringe(conf);
+ Constraint::assertionFringe(conf);
Debug("arith::resolveOutPropagated")
<< " conf["<<i<<"] " << orig << " to " << conf.size() << endl;
}
if(con->isTrue()){
Debug("approx::replayLogRec") << "not asserted?" << endl;
}else{
- con->impliedBy(exp);
+ con->impliedByIntHole(exp, false);
replayAssert(con);
Debug("approx::replayLogRec") << "cut prop" << endl;
}
if(!conflictQueueEmpty()){
/* if a conflict has been found stop */
for(size_t i = 0, N = d_conflicts.size(); i < N; ++i){
- res.push_back(d_conflicts[i]);
+ res.push_back(ConstraintCPVec());
+ intHoleConflictToVector(d_conflicts[i], res.back());
}
++d_statistics.d_replayLogRecEarlyExit;
}else if(nl.isBranch()){
Node cutConstraint = cutToLiteral(approx, *cut);
if(!cutConstraint.isNull()){
const ConstraintCPVec& exp = cut->getExplanation();
- Node asLemma = Constraint_::externalExplainByAssertions(exp);
+ Node asLemma = Constraint::externalExplainByAssertions(exp);
Node implied = Rewriter::rewrite(cutConstraint);
anythingnew = anythingnew || !isSatLiteral(implied);
bool useApprox = options::useApprox() && ApproximateSimplex::enabled() && getSolveIntegerResource();
+ Debug("TheoryArithPrivate::solveRealRelaxation")
+ << "solveRealRelaxation() approx"
+ << " " << options::useApprox()
+ << " " << ApproximateSimplex::enabled()
+ << " " << useApprox
+ << " " << safeToCallApprox()
+ << endl;
+
bool noPivotLimitPass1 = noPivotLimit && !useApprox;
d_qflraStatus = simplex.findModel(noPivotLimitPass1);
+ Debug("TheoryArithPrivate::solveRealRelaxation")
+ << "solveRealRelaxation()" << " pass1 " << d_qflraStatus << endl;
+
if(d_qflraStatus == Result::SAT_UNKNOWN && useApprox && safeToCallApprox()){
// pass2: fancy-final
static const int32_t relaxationLimit = 10000;
break;
case Result::UNSAT:
d_unknownsInARow = 0;
- if(false && previous == Result::SAT){
- ++d_statistics.d_revertsOnConflicts;
- Debug("arith::bt") << "clearing on conflict" << " " << newFacts << " " << previous << " " << d_qflraStatus << endl;
- revertOutOfConflict();
- d_errorSet.clear();
- }else{
- ++d_statistics.d_commitsOnConflicts;
- Debug("arith::bt") << "committing on conflict" << " " << newFacts << " " << previous << " " << d_qflraStatus << endl;
- d_partialModel.commitAssignmentChanges();
- revertOutOfConflict();
- if(Debug.isOn("arith::consistency::comitonconflict")){
- entireStateIsConsistent("commit on conflict");
- }
+ ++d_statistics.d_commitsOnConflicts;
+ Debug("arith::bt") << "committing on conflict" << " " << newFacts << " " << previous << " " << d_qflraStatus << endl;
+ d_partialModel.commitAssignmentChanges();
+ revertOutOfConflict();
+
+ if(Debug.isOn("arith::consistency::comitonconflict")){
+ entireStateIsConsistent("commit on conflict");
}
outputConflicts();
emmittedConflictOrSplit = true;
if(possibleConflict != Node::null()){
revertOutOfConflict();
Debug("arith::conflict") << "dio conflict " << possibleConflict << endl;
- blackBoxConflict(possibleConflict);
+ raiseBlackBoxConflict(possibleConflict);
outputConflicts();
emmittedConflictOrSplit = true;
}
d_diseqQueue.pop();
if(front->isSplit()){
- Debug("eq") << "split already" << endl;
+ Debug("arith::eq") << "split already" << endl;
}else{
- Debug("eq") << "not split already" << endl;
+ Debug("arith::eq") << "not split already" << endl;
ArithVar lhsVar = front->getVariable();
//cout << "Now " << Rewriter::rewrite(lemma) << endl;
splitSomething = true;
}else if(d_partialModel.strictlyLessThanLowerBound(lhsVar, rhsValue)){
- Debug("eq") << "can drop as less than lb" << front << endl;
+ Debug("arith::eq") << "can drop as less than lb" << front << endl;
}else if(d_partialModel.strictlyGreaterThanUpperBound(lhsVar, rhsValue)){
- Debug("eq") << "can drop as greater than ub" << front << endl;
+ Debug("arith::eq") << "can drop as greater than ub" << front << endl;
}else{
- Debug("eq") << "save" << front << ": " <<lhsValue << " != " << rhsValue << endl;
+ Debug("arith::eq") << "save" << front << ": " <<lhsValue << " != " << rhsValue << endl;
save.push_back(front);
}
}
ConstraintP c = d_constraintDatabase.lookup(n);
if(c != NullConstraint){
- Assert(!c->isSelfExplaining());
+ Assert(!c->isAssumption());
Node exp = c->externalExplainForPropagation();
Debug("arith::explain") << "constraint explanation" << n << ":" << exp << endl;
return exp;
}else if(d_assertionsThatDoNotMatchTheirLiterals.find(n) != d_assertionsThatDoNotMatchTheirLiterals.end()){
c = d_assertionsThatDoNotMatchTheirLiterals[n];
- if(!c->isSelfExplaining()){
+ if(!c->isAssumption()){
Node exp = c->externalExplainForPropagation();
Debug("arith::explain") << "assertions explanation" << n << ":" << exp << endl;
return exp;
normalized[0] : normalized.notNode();
Node lp = flattenAnd(exp.andNode(notNormalized));
Debug("arith::prop") << "propagate conflict" << lp << endl;
- blackBoxConflict(lp);
+ raiseBlackBoxConflict(lp);
outputConflicts();
return;
}else{
ConstraintP implied = d_constraintDatabase.getBestImpliedBound(v, t, bound);
if(implied != NullConstraint){
+
return rowImplicationCanBeApplied(ridx, rowUp, implied);
}
}
<< " " << hasProof
<< endl;
- if(implied->negationHasProof()){
- Warning() << "the negation of " << implied << " : " << endl
- << "has proof " << implied->getNegation() << endl
- << implied->getNegation()->externalExplainByAssertions() << endl;
- }
- if(!assertedToTheTheory && canBePropagated && !hasProof ){
+ if( !assertedToTheTheory && canBePropagated && !hasProof ){
ConstraintCPVec explain;
- d_linEq.propagateRow(explain, ridx, rowUp, implied);
+
+ PROOF(d_farkasBuffer.clear());
+ RationalVectorP coeffs = NULLPROOF(&d_farkasBuffer);
+
+ d_linEq.propagateRow(explain, ridx, rowUp, implied, coeffs);
if(d_tableau.getRowLength(ridx) <= options::arithPropAsLemmaLength()){
Node implication = implied->externalImplication(explain);
Node clause = flattenImplication(implication);
outputLemma(clause);
}else{
- implied->impliedBy(explain);
+ Assert(!implied->negationHasProof());
+ implied->impliedByFarkas(explain, coeffs, false);
+ implied->tryToPropagate();
}
return true;
}
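When proofs are enabled, the Farkas coefficients justifying a row implication are collected into d_farkasBuffer and attached with impliedByFarkas; with proofs off the coefficient pointer is NULL and only the antecedent constraints are kept. The propagation path above, condensed (sketch only; this assumes PROOF(x)/NULLPROOF(x) expand to their argument only in proof builds, as used elsewhere in this patch):

  ConstraintCPVec explain;
  PROOF(d_farkasBuffer.clear());                       // proof builds: reset the coefficient buffer
  RationalVectorP coeffs = NULLPROOF(&d_farkasBuffer); // proof builds: &d_farkasBuffer, otherwise NULL
  d_linEq.propagateRow(explain, ridx, rowUp, implied, coeffs); // fills explain (and coeffs when non-NULL)
  implied->impliedByFarkas(explain, coeffs, false);    // record antecedents plus Farkas coefficients
  implied->tryToPropagate();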
double fRand(double fMin, double fMax)
{
- double f = (double)rand() / RAND_MAX;
- return fMin + f * (fMax - fMin);
+ double f = (double)rand() / RAND_MAX;
+ return fMin + f * (fMax - fMin);
}
bool TheoryArithPrivate::propagateCandidateRow(RowIndex ridx){
BoundInfoMap d_rowTracking;
- ConstraintCPVec d_conflictBuffer;
-
/**
* The constraint database associated with the theory.
* This must be declared before ArithPartialModel.
/** This is only used by simplex at the moment. */
- context::CDList<ConstraintCPVec> d_conflicts;
+ context::CDList<ConstraintCP> d_conflicts;
+
+ /** A conflict known to hold, stored directly as a Node; set by raiseBlackBoxConflict(). */
context::CDO<Node> d_blackBoxConflict;
public:
- inline void raiseConflict(const ConstraintCPVec& cv){
- d_conflicts.push_back(cv);
- }
- void raiseConflict(ConstraintCP a, ConstraintCP b);
- void raiseConflict(ConstraintCP a, ConstraintCP b, ConstraintCP c);
+ /**
+ * This adds the constraint a to the queue of conflicts in d_conflicts.
+ * Both a and ~a must have a proof.
+ */
+ void raiseConflict(ConstraintCP a);
- inline void blackBoxConflict(Node bb){
- if(d_blackBoxConflict.get().isNull()){
- d_blackBoxConflict = bb;
- }
- }
+ // inline void raiseConflict(const ConstraintCPVec& cv){
+ // d_conflicts.push_back(cv);
+ // }
+
+ // void raiseConflict(ConstraintCP a, ConstraintCP b);
+ // void raiseConflict(ConstraintCP a, ConstraintCP b, ConstraintCP c);
+
+ /** Raises a conflict directly as a Node, bypassing the Constraint system ("magically known to hold"). */
+ void raiseBlackBoxConflict(Node bb);
private:
/**
* Outputs the contents of d_conflicts onto d_out.
- * Must be inConflict().
+ * The conditions of anyConflict() must hold.
*/
void outputConflicts();
std::pair<ConstraintP, ArithVar> replayGetConstraint(const DenseMap<Rational>& lhs, Kind k, const Rational& rhs, bool branch);
void replayAssert(ConstraintP c);
- //ConstConstraintVec toExplanation(Node n) const;
+ static ConstraintCP vectorToIntHoleConflict(const ConstraintCPVec& conflict);
+ static void intHoleConflictToVector(ConstraintCP conflicting, ConstraintCPVec& conflict);
+
// Returns true if the node contains a literal
// that is an arithmetic literal and is not a sat literal
// No caching is done so this should likely only
uint32_t d_solveIntMaybeHelp, d_solveIntAttempts;
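+ /** Buffer for the Farkas coefficients collected while propagating rows; only populated in proof builds. */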
+ RationalVector d_farkasBuffer;
+
/** These fields are designed to be accessible to TheoryArith methods. */
class Statistics {
public: