*/
fd_batch_set_stage(batch, FD_STAGE_NULL);
- fd_context_all_dirty(batch->ctx);
batch_flush_reset_dependencies(batch, true);
batch->flushed = true;
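/* swap in a replacement for the batch we just flushed: */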
struct fd_batch *new_batch;
if (ctx->screen->reorder) {
- new_batch = fd_batch_from_fb(&ctx->screen->batch_cache,
- ctx, &batch->framebuffer);
+ /* defer allocating a new batch until one is needed for rendering,
+ * to avoid piling up unused batches in apps that create many contexts:
+ */
+ new_batch = NULL;
} else {
new_batch = fd_batch_create(ctx, false);
+ util_copy_framebuffer_state(&new_batch->framebuffer, &batch->framebuffer);
}
- util_copy_framebuffer_state(&new_batch->framebuffer, &batch->framebuffer);
-
fd_batch_reference(&batch, NULL);
ctx->batch = new_batch;
}
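
/* the gallium pipe_context::flush() hook: */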
{
struct fd_context *ctx = fd_context(pctx);
struct pipe_fence_handle *fence = NULL;
+ // TODO we want to look up the batch if it exists, but not create one if it doesn't.
+ struct fd_batch *batch = fd_context_batch(ctx);
DBG("%p: flush: flags=%x", ctx->batch, flags);
- if (!ctx->batch)
+ if (!batch)
return;
/* Take a ref to the batch's fence (batch can be unref'd when flushed): */
- fd_fence_ref(pctx->screen, &fence, ctx->batch->fence);
+ fd_fence_ref(pctx->screen, &fence, batch->fence);
if (flags & PIPE_FLUSH_FENCE_FD)
- ctx->batch->needs_out_fence_fd = true;
+ batch->needs_out_fence_fd = true;
if (!ctx->screen->reorder) {
- fd_batch_flush(ctx->batch, true, false);
+ fd_batch_flush(batch, true, false);
} else if (flags & PIPE_FLUSH_DEFERRED) {
fd_bc_flush_deferred(&ctx->screen->batch_cache, ctx);
} else {
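
/* on context teardown, drop the context's saved fb state (unrefs any
 * surfaces it holds) along with the current batch:
 */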
if (ctx->screen->reorder && util_queue_is_initialized(&ctx->flush_queue))
util_queue_destroy(&ctx->flush_queue);
+ util_copy_framebuffer_state(&ctx->framebuffer, NULL);
fd_batch_reference(&ctx->batch, NULL); /* unref current batch */
fd_bc_invalidate_context(ctx);
goto fail;
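
/* at context init, a batch is only pre-allocated when reordering is
 * disabled; with reorder the first batch comes from fd_context_batch():
 */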
pctx->const_uploader = pctx->stream_uploader;
- ctx->batch = fd_bc_alloc_batch(&screen->batch_cache, ctx);
+ if (!ctx->screen->reorder)
+ ctx->batch = fd_bc_alloc_batch(&screen->batch_cache, ctx);
slab_create_child(&ctx->transfer_pool, &screen->transfer_pool);
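
/* in struct fd_context, alongside the rest of the saved state: */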
struct pipe_blend_color blend_color;
struct pipe_stencil_ref stencil_ref;
unsigned sample_mask;
+ /* local context fb state, for when ctx->batch is null: */
+ struct pipe_framebuffer_state framebuffer;
struct pipe_poly_stipple stipple;
struct pipe_viewport_state viewport;
struct fd_constbuf_stateobj constbuf[PIPE_SHADER_TYPES];
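
/* return the current batch, creating one on demand from the context's
 * saved fb state:
 */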
static inline struct fd_batch *
fd_context_batch(struct fd_context *ctx)
{
+ if (unlikely(!ctx->batch)) {
+ struct fd_batch *batch =
+ fd_batch_from_fb(&ctx->screen->batch_cache, ctx, &ctx->framebuffer);
+ util_copy_framebuffer_state(&batch->framebuffer, &ctx->framebuffer);
+ ctx->batch = batch;
+ fd_context_all_dirty(ctx);
+ }
return ctx->batch;
}
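
/* A hypothetical caller, for illustration only (example_draw is not part
 * of this change): draw-time code asks for the batch via fd_context_batch()
 * instead of dereferencing ctx->batch, so the batch is (re)created lazily:
 */
static inline void
example_draw(struct fd_context *ctx)
{
	/* binds a batch on first use and marks all state dirty: */
	struct fd_batch *batch = fd_context_batch(ctx);
	assert(batch && (batch == ctx->batch));
}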
struct pipe_framebuffer_state *cso;
if (ctx->screen->reorder) {
- struct fd_batch *batch, *old_batch = NULL;
+ struct fd_batch *old_batch = NULL;
fd_batch_reference(&old_batch, ctx->batch);
if (likely(old_batch))
fd_batch_set_stage(old_batch, FD_STAGE_NULL);
- batch = fd_batch_from_fb(&ctx->screen->batch_cache, ctx, framebuffer);
fd_batch_reference(&ctx->batch, NULL);
- fd_reset_wfi(batch);
- ctx->batch = batch;
fd_context_all_dirty(ctx);
if (old_batch && old_batch->blit && !old_batch->back_blit) {
/* blits need a hw flush anyway, so just flush the old batch now: */
fd_batch_flush(old_batch, false, false);
}
- cso = &ctx->batch->framebuffer;
+ cso = &ctx->framebuffer;
util_copy_framebuffer_state(cso, framebuffer);
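
/* the current fb state now always lives on the context itself; it is only
 * copied into a batch when fd_context_batch() finally creates one:
 */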