instr->num_components - 2);
}
- if (c->execute.file != QFILE_NULL)
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ if (c->execute.file != QFILE_NULL) {
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
+ }
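/* The removed vir_PF() picked the flag-carrying instruction implicitly,
 * reusing the source's defining instruction when it could, which made it
 * harder to see which instruction's result the flags came from.  The idiom
 * used throughout this patch makes it explicit: an ALU op writes the NOP
 * register (its result is discarded), carries the PUSHZ/PUSHN/PUSHC update,
 * and a conditional instruction then consumes the flags.  A minimal sketch
 * with the helpers seen in these hunks (dest and value are placeholders):
 *
 *     struct qinst *flags = vir_MOV_dest(c, vir_nop_reg(), c->execute);
 *     vir_set_pf(flags, V3D_QPU_PF_PUSHZ);
 *     vir_MOV_cond(c, V3D_QPU_COND_IFA, dest, value);
 */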
struct qreg dest;
if (config == ~0)
/* Set the flags to the current exec mask.
*/
c->cursor = vir_before_inst(last_inst);
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
c->cursor = vir_after_inst(last_inst);
vir_set_cond(last_inst, V3D_QPU_COND_IFA);
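/* The PUSHZ MOV inserted before last_inst sets Z where c->execute == 0 (the
 * channel is active), and the IFA condition on last_inst then restricts its
 * write to exactly those channels. */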
struct qreg t = vir_get_temp(c);
vir_MOV_dest(c, t, vir_uniform_f(c, 0.0));
- vir_PF(c, vir_FMOV(c, src), V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_FMOV_dest(c, vir_nop_reg(), src), V3D_QPU_PF_PUSHZ);
vir_MOV_cond(c, V3D_QPU_COND_IFNA, t, vir_uniform_f(c, 1.0));
- vir_PF(c, vir_FMOV(c, src), V3D_QPU_PF_PUSHN);
+ vir_set_pf(vir_FMOV_dest(c, vir_nop_reg(), src), V3D_QPU_PF_PUSHN);
vir_MOV_cond(c, V3D_QPU_COND_IFA, t, vir_uniform_f(c, -1.0));
return vir_MOV(c, t);
}
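/* The sequence above is the sign(src) lowering: t starts as 0.0, becomes 1.0
 * on channels where src != 0 (PUSHZ, then IFNA), and is overwritten with -1.0
 * on channels where src is negative (PUSHN, then IFA). */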
return cond;
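/* Fallback for booleans whose defining comparison couldn't be reused above:
 * push Z from the value itself and treat "non-zero" (IFNA) as true. */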
out:
- vir_PF(c, ntq_get_src(c, src, 0), V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), ntq_get_src(c, src, 0)),
+ V3D_QPU_PF_PUSHZ);
return V3D_QPU_COND_IFNA;
}
break;
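/* i2b32/f2b32: booleanize by pushing Z from the source; IFNA (source != 0)
 * selects ~0 (true), the remaining channels get 0. */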
case nir_op_i2b32:
case nir_op_f2b32:
- vir_PF(c, src[0], V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), src[0]),
+ V3D_QPU_PF_PUSHZ);
result = vir_MOV(c, vir_SEL(c, V3D_QPU_COND_IFNA,
vir_uniform_ui(c, ~0),
vir_uniform_ui(c, 0)));
break;
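/* fcsel: same flag-then-SEL idiom, picking src[1] where the condition src[0]
 * is non-zero and src[2] elsewhere. */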
case nir_op_fcsel:
- vir_PF(c, src[0], V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), src[0]),
+ V3D_QPU_PF_PUSHZ);
result = vir_MOV(c, vir_SEL(c, V3D_QPU_COND_IFNA,
src[1], src[2]));
break;
break;
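/* uadd_carry: the ADD result itself is thrown away (NOP destination); only
 * the carry pushed by PUSHC matters, and IFA yields ~0 on the channels that
 * carried. */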
case nir_op_uadd_carry:
- vir_PF(c, vir_ADD(c, src[0], src[1]), V3D_QPU_PF_PUSHC);
+ vir_set_pf(vir_ADD_dest(c, vir_nop_reg(), src[0], src[1]),
+ V3D_QPU_PF_PUSHC);
result = vir_MOV(c, vir_SEL(c, V3D_QPU_COND_IFA,
vir_uniform_ui(c, ~0),
vir_uniform_ui(c, 0)));
break;
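/* load_helper_invocation: MSF reads the current coverage mask, so a zero
 * value (IFA after the PUSHZ) marks a helper invocation, which gets ~0. */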
case nir_intrinsic_load_helper_invocation:
- vir_PF(c, vir_MSF(c), V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MSF_dest(c, vir_nop_reg()), V3D_QPU_PF_PUSHZ);
ntq_store_dest(c, &instr->dest, 0,
vir_MOV(c, vir_SEL(c, V3D_QPU_COND_IFA,
vir_uniform_ui(c, ~0),
case nir_intrinsic_discard:
if (c->execute.file != QFILE_NULL) {
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
vir_set_cond(vir_SETMSF_dest(c, vir_nop_reg(),
vir_uniform_ui(c, 0)),
V3D_QPU_COND_IFA);
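/* discard inside non-uniform control flow: only channels with execute == 0
 * (IFA after the PUSHZ) are live here, so the conditional SETMSF zeroes just
 * those channels' coverage. */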
/* Jump to ELSE if nothing is active for THEN, otherwise fall
* through.
*/
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
vir_BRANCH(c, V3D_QPU_BRANCH_COND_ALLNA);
vir_link_blocks(c->cur_block, else_block);
vir_link_blocks(c->cur_block, then_block);
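/* ALLNA: the Z flag pushed from c->execute is clear on every channel, i.e.
 * execute != 0 everywhere and no channel would run THEN, so control goes
 * straight to ELSE instead of falling through. */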
* active channels update their execute flags to point to
* ENDIF
*/
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
vir_MOV_cond(c, V3D_QPU_COND_IFA, c->execute,
vir_uniform_ui(c, after_block->index));
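/* IFA picks the channels still active in THEN (execute == 0); parking their
 * execute value on the ENDIF block keeps them inactive across the ELSE
 * instructions that follow. */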
/* If everything points at ENDIF, then jump there immediately. */
- vir_PF(c, vir_XOR(c, c->execute,
- vir_uniform_ui(c, after_block->index)),
- V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_XOR_dest(c, vir_nop_reg(),
+ c->execute,
+ vir_uniform_ui(c, after_block->index)),
+ V3D_QPU_PF_PUSHZ);
vir_BRANCH(c, V3D_QPU_BRANCH_COND_ALLA);
vir_link_blocks(c->cur_block, after_block);
vir_link_blocks(c->cur_block, else_block);
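/* The XOR is zero exactly on channels already pointing at ENDIF, so ALLA
 * (Z set on every channel) means nobody needs the ELSE block and it can be
 * branched over entirely. */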
{
switch (jump->type) {
case nir_jump_break:
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
vir_MOV_cond(c, V3D_QPU_COND_IFA, c->execute,
vir_uniform_ui(c, c->loop_break_block->index));
break;
case nir_jump_continue:
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
vir_MOV_cond(c, V3D_QPU_COND_IFA, c->execute,
vir_uniform_ui(c, c->loop_cont_block->index));
break;
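/* Both jumps work the same way: the channels active at the jump
 * (execute == 0, selected by IFA after the PUSHZ) park their execute value
 * on the target block's index and stay inactive until control flow reaches
 * that block. */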
*
* XXX: Use the .ORZ flags update, instead.
*/
- vir_PF(c, vir_XOR(c,
- c->execute,
- vir_uniform_ui(c, c->loop_cont_block->index)),
- V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_XOR_dest(c,
+ vir_nop_reg(),
+ c->execute,
+ vir_uniform_ui(c, c->loop_cont_block->index)),
+ V3D_QPU_PF_PUSHZ);
vir_MOV_cond(c, V3D_QPU_COND_IFA, c->execute, vir_uniform_ui(c, 0));
- vir_PF(c, c->execute, V3D_QPU_PF_PUSHZ);
+ vir_set_pf(vir_MOV_dest(c, vir_nop_reg(), c->execute),
+ V3D_QPU_PF_PUSHZ);
struct qinst *branch = vir_BRANCH(c, V3D_QPU_BRANCH_COND_ANYA);
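/* Bottom of the loop: channels parked on the continue block (XOR result of
 * zero, selected by IFA) are re-activated by clearing their execute value,
 * and the ANYA branch is then taken while at least one channel still has
 * execute == 0, so the loop body runs again. */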
/* Pixels that were not dispatched or have been discarded should not