/* Pipeline & streamout query controls. */
#define R600_CONTEXT_START_PIPELINE_STATS (1u << 1)
#define R600_CONTEXT_STOP_PIPELINE_STATS (1u << 2)
-#define R600_CONTEXT_PRIVATE_FLAG (1u << 3)
+#define R600_CONTEXT_FLUSH_FOR_RENDER_COND (1u << 3)
+#define R600_CONTEXT_PRIVATE_FLAG (1u << 4)
/* special primitive types */
#define R600_PRIM_RECTANGLE_LIST PIPE_PRIM_MAX
/* Setting this in the render cond atom is too late,
* so set it here. */
- rctx->flags |= rctx->screen->barrier_flags.L2_to_cp;
-
- atom->num_dw = 5;
+ rctx->flags |= rctx->screen->barrier_flags.L2_to_cp |
+ R600_CONTEXT_FLUSH_FOR_RENDER_COND;
rctx->render_cond_force_off = old_force_off;
+ }
+
+ if (needs_workaround) {
+ atom->num_dw = 5;
} else {
for (qbuf = &rquery->buffer; qbuf; qbuf = qbuf->previous)
atom->num_dw += (qbuf->results_end / rquery->result_size) * 5;
* the wait and the draw)
*/
struct r600_atom *shader_pointers = &sctx->shader_pointers.atom;
+ unsigned masked_atoms = 1u << shader_pointers->id;
- /* Emit all states except shader pointers. */
- si_emit_all_states(sctx, info, 1 << shader_pointers->id);
+ if (unlikely(sctx->b.flags & R600_CONTEXT_FLUSH_FOR_RENDER_COND))
+ masked_atoms |= 1u << sctx->b.render_cond_atom.id;
+
+ /* Emit all states except shader pointers and render condition. */
+ si_emit_all_states(sctx, info, masked_atoms);
si_emit_cache_flush(sctx);
/* <-- CUs are idle here. */
return;
/* Set shader pointers after descriptors are uploaded. */
- if (si_is_atom_dirty(sctx, shader_pointers)) {
+ if (si_is_atom_dirty(sctx, shader_pointers))
shader_pointers->emit(&sctx->b, NULL);
- sctx->dirty_atoms = 0;
- }
+ if (si_is_atom_dirty(sctx, &sctx->b.render_cond_atom))
+ sctx->b.render_cond_atom.emit(&sctx->b, NULL);
+ sctx->dirty_atoms = 0;
si_emit_draw_packets(sctx, info, indexbuf, index_size, index_offset);
/* <-- CUs are busy here. */