default = "false"
help = "Unification-based function synthesis"
+[[option]]
+ name = "sygusBoolIteReturnConst"
+ category = "regular"
+ long = "sygus-bool-ite-return-const"
+ type = "bool"
+ default = "false"
+ help = "Only use Boolean constants for return values in unification-based function synthesis"
+
[[option]]
name = "sygusQePreproc"
category = "regular"
const std::vector<Node>& candidates,
std::vector<Node>& lemmas)
{
- Trace("cegis-unif") << "Initialize CegisUnif : " << n << std::endl;
// list of strategy points for unification candidates
std::vector<Node> unif_candidate_pts;
// map from strategy points to their conditions
<< " to strategy point " << ci.second.d_pt
<< "\n";
d_tds->registerEnumerator(e, ci.second.d_pt, d_parent);
- // if the sygus datatype is interpreted as an infinite type
- // (this should be the case for almost all examples)
- TypeNode et = e.getType();
- if (!et.isInterpretedFinite())
- {
- // it is disequal from all previous ones
- for (const Node& ei : ci.second.d_enums[index])
- {
- if (ei == e)
- {
- continue;
- }
- Node deq = e.eqNode(ei).negate();
- Trace("cegis-unif-enum-lemma")
- << "CegisUnifEnum::lemma, enum deq:" << deq << "\n";
- d_qe->getOutputChannel().lemma(deq);
- }
- }
+ // TODO: add symmetry-breaking lemmas making e distinct from each ei in
+ // (ci.second.d_enums[index] \ {e}), once e's type has had at least
+ // ci.second.d_enums[index].size() distinct values enumerated
}
}
// register the evaluation points at the new value
cargs.push_back( std::vector< CVC4::Type >() );
}
}
- //add constants if no variables and no connected types
- if( ops.back().empty() && types.empty() ){
- std::vector< Node > consts;
- mkSygusConstantsForType( btype, consts );
- for( unsigned j=0; j<consts.size(); j++ ){
- std::stringstream ss;
- ss << consts[j];
- Trace("sygus-grammar-def") << "...add for constant " << ss.str() << std::endl;
- ops.back().push_back( consts[j].toExpr() );
- cnames.push_back( ss.str() );
- cargs.push_back( std::vector< CVC4::Type >() );
- }
+ //add constants
+ std::vector<Node> consts;
+ mkSygusConstantsForType(btype, consts);
+ for (unsigned j = 0; j < consts.size(); j++)
+ {
+ std::stringstream ss;
+ ss << consts[j];
+ Trace("sygus-grammar-def") << "...add for constant " << ss.str()
+ << std::endl;
+ ops.back().push_back(consts[j].toExpr());
+ cnames.push_back(ss.str());
+ cargs.push_back(std::vector<CVC4::Type>());
}
//add operators
for (unsigned i = 0; i < 4; i++)
CVC4::Kind k = i == 0
? kind::NOT
: (i == 1 ? kind::AND : (i == 2 ? kind::OR : kind::ITE));
+ // TODO #1935 ITEs are added to Boolean grammars so that we can infer
+ // unification strategies. We can do away with this if we can infer
+ // unification strategies from and/or/not
+ if (k == ITE && !options::sygusUnif())
+ {
+ continue;
+ }
Trace("sygus-grammar-def") << "...add for " << k << std::endl;
ops.back().push_back(NodeManager::currentNM()->operatorOf(k).toExpr());
cnames.push_back(kind::kindToString(k));
#include "theory/quantifiers/sygus/sygus_unif_rl.h"
#include "options/base_options.h"
+#include "options/quantifiers_options.h"
#include "printer/printer.h"
#include "theory/quantifiers/sygus/ce_guided_conjecture.h"
#include "theory/quantifiers/sygus/term_database_sygus.h"
SygusUnif::initializeCandidate(qe, f, all_enums, strategy_lemmas);
// based on the strategy inferred for each function, determine if we are
// using a unification strategy that is compatible with our approach.
- d_strategy[f].staticLearnRedundantOps(strategy_lemmas);
+ StrategyRestrictions restrictions;
+ if (options::sygusBoolIteReturnConst())
+ {
+ restrictions.d_iteReturnBoolConst = true;
+ }
+ d_strategy[f].staticLearnRedundantOps(strategy_lemmas, restrictions);
registerStrategy(f, enums);
// Copy candidates and check whether CegisUnif for any of them
if (d_unif_candidates.find(f) != d_unif_candidates.end())
void SygusUnifStrategy::staticLearnRedundantOps(
std::map<Node, std::vector<Node>>& strategy_lemmas)
+{
+ StrategyRestrictions restrictions;
+ staticLearnRedundantOps(strategy_lemmas, restrictions);
+}
+
+void SygusUnifStrategy::staticLearnRedundantOps(
+ std::map<Node, std::vector<Node>>& strategy_lemmas,
+ StrategyRestrictions& restrictions)
{
for (unsigned i = 0; i < d_esym_list.size(); i++)
{
debugPrint("sygus-unif");
std::map<Node, std::map<NodeRole, bool> > visited;
std::map<Node, std::map<unsigned, bool> > needs_cons;
- staticLearnRedundantOps(getRootEnumerator(), role_equal, visited, needs_cons);
+ staticLearnRedundantOps(
+ getRootEnumerator(), role_equal, visited, needs_cons, restrictions);
// now, check the needs_cons map
for (std::pair<const Node, std::map<unsigned, bool> >& nce : needs_cons)
{
void SygusUnifStrategy::staticLearnRedundantOps(
Node e,
NodeRole nrole,
- std::map<Node, std::map<NodeRole, bool> >& visited,
- std::map<Node, std::map<unsigned, bool> >& needs_cons)
+ std::map<Node, std::map<NodeRole, bool>>& visited,
+ std::map<Node, std::map<unsigned, bool>>& needs_cons,
+ StrategyRestrictions& restrictions)
{
if (visited[e].find(nrole) != visited[e].end())
{
for (unsigned j = 0, size = snode.d_strats.size(); j < size; j++)
{
EnumTypeInfoStrat* etis = snode.d_strats[j];
- int cindex = Datatype::indexOf(etis->d_cons.toExpr());
- Assert(cindex != -1);
- Trace("sygus-strat-slearn")
- << "...by strategy, can exclude operator " << etis->d_cons << std::endl;
- needs_cons_curr[static_cast<unsigned>(cindex)] = false;
+ unsigned cindex =
+ static_cast<unsigned>(Datatype::indexOf(etis->d_cons.toExpr()));
+ Trace("sygus-strat-slearn") << "...by strategy, can exclude operator "
+ << etis->d_cons << std::endl;
+ needs_cons_curr[cindex] = false;
+ // if this constructor is an ITE whose two branch arguments have the same
+ // Boolean type, try to eliminate from etn's datatype all operators except
+ // the TRUE/FALSE constants
+ if (restrictions.d_iteReturnBoolConst)
+ {
+ const Datatype& dt =
+ static_cast<DatatypeType>(etn.toType()).getDatatype();
+ Node op = Node::fromExpr(dt[cindex].getSygusOp());
+ TypeNode sygus_tn = TypeNode::fromType(dt.getSygusType());
+ if (op.getKind() == kind::BUILTIN
+ && NodeManager::operatorToKind(op) == ITE
+ && sygus_tn.isBoolean()
+ && (TypeNode::fromType(dt[cindex].getArgType(1))
+ == TypeNode::fromType(dt[cindex].getArgType(2))))
+ {
+ unsigned ncons = dt.getNumConstructors(), indexT = ncons,
+ indexF = ncons;
+ for (unsigned k = 0; k < ncons; ++k)
+ {
+ Node op_arg = Node::fromExpr(dt[k].getSygusOp());
+ if (dt[k].getNumArgs() > 0 || !op_arg.isConst())
+ {
+ continue;
+ }
+ if (op_arg.getConst<bool>())
+ {
+ indexT = k;
+ }
+ else
+ {
+ indexF = k;
+ }
+ }
+ if (indexT < ncons && indexF < ncons)
+ {
+ Trace("sygus-strat-slearn")
+ << "...for ite boolean arg, can exclude all operators but T/F\n";
+ for (unsigned k = 0; k < ncons; ++k)
+ {
+ needs_cons_curr[k] = false;
+ }
+ needs_cons_curr[indexT] = true;
+ needs_cons_curr[indexF] = true;
+ }
+ }
+ }
for (std::pair<Node, NodeRole>& cec : etis->d_cenum)
{
- staticLearnRedundantOps(cec.first, cec.second, visited, needs_cons);
+ staticLearnRedundantOps(
+ cec.first, cec.second, visited, needs_cons, restrictions);
}
}
// get the current datatype
const Datatype& dt = static_cast<DatatypeType>(etn.toType()).getDatatype();
// do not use recursive Boolean connectives for conditions of ITEs
- if (nrole == role_ite_condition)
+ if (nrole == role_ite_condition && restrictions.d_iteCondOnlyAtoms)
{
+ TypeNode sygus_tn = TypeNode::fromType(dt.getSygusType());
for (unsigned j = 0, size = dt.getNumConstructors(); j < size; j++)
{
Node op = Node::fromExpr(dt[j].getSygusOp());
Trace("sygus-strat-slearn")
<< "...for ite condition, look at operator : " << op << std::endl;
+ if (op.isConst() && dt[j].getNumArgs() == 0)
+ {
+ Trace("sygus-strat-slearn")
+ << "...for ite condition, can exclude Boolean constant " << op
+ << std::endl;
+ needs_cons_curr[j] = false;
+ continue;
+ }
if (op.getKind() == kind::BUILTIN)
{
Kind k = NodeManager::operatorToKind(op);
bool isValid(UnifContext& x);
};
+/**
+ * Flags for extra restrictions to be applied during redundant operator
+ * learning (see staticLearnRedundantOps).
+ */
+struct StrategyRestrictions
+{
+ StrategyRestrictions() : d_iteReturnBoolConst(false), d_iteCondOnlyAtoms(true)
+ {
+ }
+ /**
+ * If this flag is true then staticLearnRedundantOps will also try to
+ * restrict the return values of Boolean ITEs to constants.
+ */
+ bool d_iteReturnBoolConst;
+ /**
+ * If this flag is true then staticLearnRedundantOps will also try to
+ * restrict the conditions of ITEs to atoms.
+ */
+ bool d_iteCondOnlyAtoms;
+};
+
/**
* Stores strategy and enumeration information for a function-to-synthesize.
*
* These may correspond to static symmetry breaking predicates (for example,
* those that exclude ITE from enumerators whose role is enum_io when the
* strategy is ITE_strat).
+ *
+ * The module may also try to apply the given pruning restrictions (see
+ * StrategyRestrictions for more details).
+ */
+ void staticLearnRedundantOps(
+ std::map<Node, std::vector<Node>>& strategy_lemmas,
+ StrategyRestrictions& restrictions);
+ /**
+ * creates the default restrictions when they are not given and calls the
+ * above function
*/
void staticLearnRedundantOps(
std::map<Node, std::vector<Node>>& strategy_lemmas);
void staticLearnRedundantOps(
Node e,
NodeRole nrole,
- std::map<Node, std::map<NodeRole, bool> >& visited,
- std::map<Node, std::map<unsigned, bool> >& needs_cons);
+ std::map<Node, std::map<NodeRole, bool>>& visited,
+ std::map<Node, std::map<unsigned, bool>>& needs_cons,
+ StrategyRestrictions& restrictions);
/** finish initialization of the strategy tree
*
* (e, nrole) specify the strategy node in the graph we are currently