struct nv_ref *src0 = nvi->src[0];
struct nv_ref *src1 = nvi->src[1];
- if (!nv_op_commutative(nvi->opcode) && NV_BASEOP(nvi->opcode) != NV_OP_SET)
+ if (!nv_op_commutative(nvi->opcode) &&
+ NV_BASEOP(nvi->opcode) != NV_OP_SET &&
+ NV_BASEOP(nvi->opcode) != NV_OP_SLCT)
return;
assert(src0 && src1 && src0->value && src1->value);
}
}
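SET and SLCT are not commutative, but their first two sources can still be swapped as long as the comparison condition is fixed up afterwards (which the tail of this function now does), so both base ops are exempted from the early return above. The identities relied on are sketched below, under the assumed SLCT semantics dst = (src2 CC 0) ? src0 : src1; this note is not part of the patch.

/* Sketch only -- identities behind the source swap:
 *   set  CC a, b     ==  set  swapped(CC) b, a      (LT<->GT, LE<->GE)
 *   slct CC a, b, c  ==  slct inverse(CC) b, a, c   (e.g. LT<->GE)
 */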
- if (nvi->src[0] != src0 && NV_BASEOP(nvi->opcode) == NV_OP_SET)
- nvi->set_cond = (nvi->set_cond & ~7) | cc_swapped[nvi->set_cond & 7];
+ if (nvi->src[0] != src0) {
+ if (NV_BASEOP(nvi->opcode) == NV_OP_SET)
+ nvi->set_cond = (nvi->set_cond & ~7) | cc_swapped[nvi->set_cond & 7];
+ else
+ if (NV_BASEOP(nvi->opcode) == NV_OP_SLCT)
+ nvi->set_cond = NV_CC_INVERSE(nvi->set_cond);
+ }
}
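The swap table and NV_CC_INVERSE above only work for a particular 3-bit condition-code encoding. A minimal sketch of that assumption follows; the *_sketch names are illustrative, and the inversion macro shown is presumed rather than taken from the patch.

#include <stdint.h>

/* Sketch only: assumed 3-bit condition-code encoding. */
enum {
   CC_FL = 0, CC_LT = 1, CC_EQ = 2, CC_LE = 3,
   CC_GT = 4, CC_NE = 5, CC_GE = 6, CC_TR = 7
};

/* Swapping the compared operands mirrors the relation (LT<->GT, LE<->GE),
 * which is what cc_swapped[] encodes; FL/EQ/NE/TR are symmetric. */
static const uint8_t cc_swapped_sketch[8] = { 0, 4, 2, 6, 1, 5, 3, 7 };

/* Inverting a condition complements it within the low 3 bits,
 * e.g. LT(1) <-> GE(6), EQ(2) <-> NE(5). */
#define CC_INVERSE_SKETCH(cc) ((cc) ^ 7)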
static void
{ NV_OP_SELP, "selp", NV_TYPE_U32, 0, 0, 0, 0, 1, 0, 0, 0 },
- { NV_OP_SLCT_F32, "slct", NV_TYPE_F32, 0, 0, 0, 0, 1, 0, 0, 0 },
- { NV_OP_SLCT_F32, "slct", NV_TYPE_S32, 0, 0, 0, 0, 1, 0, 0, 0 },
- { NV_OP_SLCT_F32, "slct", NV_TYPE_U32, 0, 0, 0, 0, 1, 0, 0, 0 },
+ { NV_OP_SLCT, "slct", NV_TYPE_F32, 0, 0, 0, 0, 1, 0, 2, 2 },
+ { NV_OP_SLCT, "slct", NV_TYPE_S32, 0, 0, 0, 0, 1, 0, 2, 2 },
+ { NV_OP_SLCT, "slct", NV_TYPE_U32, 0, 0, 0, 0, 1, 0, 2, 2 },
{ NV_OP_ADD, "sub", NV_TYPE_F32, 0, 0, 0, 0, 1, 0, 1, 0 },
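The first column of these entries appears to be the base opcode that NV_BASEOP() resolves a typed opcode to, so pointing all three typed slct entries at a common NV_OP_SLCT is what lets the NV_BASEOP(nvi->opcode) == NV_OP_SLCT checks in the swap pass match any of them. A hedged sketch of that lookup, with the struct and macro shapes assumed:

/* Sketch only: assumed shape of the per-opcode info and the base lookup. */
struct nv_op_info_sketch {
   unsigned    base;   /* e.g. NV_OP_SLCT_F32 -> NV_OP_SLCT */
   const char *name;
   unsigned    type;
   /* remaining per-opcode flag fields omitted */
};

/* Presumed definition: a plain table lookup of the base field, e.g.
 *   #define NV_BASEOP(op) (op_info_table[op].base)
 */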
case TGSI_OPCODE_CMP:
FOR_EACH_DST0_ENABLED_CHANNEL(c, insn) {
src0 = emit_fetch(bld, insn, 0, c);
- src0 = bld_setp(bld, NV_OP_SET_F32, NV_CC_LT, src0, bld->zero);
src1 = emit_fetch(bld, insn, 1, c);
src2 = emit_fetch(bld, insn, 2, c);
- dst0[c] = bld_insn_3(bld, NV_OP_SELP, src1, src2, src0);
+ dst0[c] = bld_insn_3(bld, NV_OP_SLCT_F32, src1, src2, src0);
+ dst0[c]->insn->set_cond = NV_CC_LT;
}
break;
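For reference, TGSI CMP computes dst = (src0 < 0.0) ? src1 : src2 per channel. The removed code materialized the comparison with a SET and consumed it with a SELP; the replacement folds both into a single SLCT whose set_cond compares the third source against zero. A scalar C model of one channel on both paths, under that assumed SLCT semantics:

/* Sketch only: one channel of TGSI CMP, modelled in plain C. */
static float cmp_via_set_selp(float a, float b, float c)
{
   unsigned p = (a < 0.0f);   /* SET/setp: build the predicate          */
   return p ? b : c;          /* SELP b, c, p: select on the predicate  */
}

static float cmp_via_slct(float a, float b, float c)
{
   return (a < 0.0f) ? b : c; /* SLCT_F32 b, c, a with set_cond = NV_CC_LT */
}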
case TGSI_OPCODE_COS: