freedreno: allocate ctx's batch on demand
authorRob Clark <robdclark@gmail.com>
Mon, 3 Sep 2018 20:33:59 +0000 (16:33 -0400)
committerRob Clark <robdclark@gmail.com>
Wed, 5 Sep 2018 17:38:43 +0000 (13:38 -0400)
Don't fall over when an app wants more than 32 contexts.  Instead, defer
allocating each context's batch until it is actually needed for rendering.

Signed-off-by: Rob Clark <robdclark@gmail.com>
src/gallium/drivers/freedreno/freedreno_batch.c
src/gallium/drivers/freedreno/freedreno_context.c
src/gallium/drivers/freedreno/freedreno_context.h
src/gallium/drivers/freedreno/freedreno_state.c

index 36b834f4013b1b7a32f853f3644f13c915b38eee..ff8298e82a1827fc2f144375b8e3fbc959df1c83 100644 (file)
@@ -296,7 +296,6 @@ batch_flush(struct fd_batch *batch, bool force)
         */
        fd_batch_set_stage(batch, FD_STAGE_NULL);
 
-       fd_context_all_dirty(batch->ctx);
        batch_flush_reset_dependencies(batch, true);
 
        batch->flushed = true;
@@ -355,14 +354,15 @@ fd_batch_flush(struct fd_batch *batch, bool sync, bool force)
                struct fd_batch *new_batch;
 
                if (ctx->screen->reorder) {
-                       new_batch = fd_batch_from_fb(&ctx->screen->batch_cache,
-                                       ctx, &batch->framebuffer);
+                       /* defer allocating new batch until one is needed for rendering
+                        * to avoid unused batches for apps that create many contexts
+                        */
+                       new_batch = NULL;
                } else {
                        new_batch = fd_batch_create(ctx, false);
+                       util_copy_framebuffer_state(&new_batch->framebuffer, &batch->framebuffer);
                }
 
-               util_copy_framebuffer_state(&new_batch->framebuffer, &batch->framebuffer);
-
                fd_batch_reference(&batch, NULL);
                ctx->batch = new_batch;
        }
index 1d91b079f608ac11ecfa9c7874d82ad2c8b3c673..55e978073ab9472a8cd5aa197c104b89bfb72a98 100644 (file)
@@ -46,20 +46,22 @@ fd_context_flush(struct pipe_context *pctx, struct pipe_fence_handle **fencep,
 {
        struct fd_context *ctx = fd_context(pctx);
        struct pipe_fence_handle *fence = NULL;
+       // TODO we want to lookup batch if it exists, but not create one if not.
+       struct fd_batch *batch = fd_context_batch(ctx);
 
        DBG("%p: flush: flags=%x\n", ctx->batch, flags);
 
-       if (!ctx->batch)
+       if (!batch)
                return;
 
        /* Take a ref to the batch's fence (batch can be unref'd when flushed: */
-       fd_fence_ref(pctx->screen, &fence, ctx->batch->fence);
+       fd_fence_ref(pctx->screen, &fence, batch->fence);
 
        if (flags & PIPE_FLUSH_FENCE_FD)
-               ctx->batch->needs_out_fence_fd = true;
+               batch->needs_out_fence_fd = true;
 
        if (!ctx->screen->reorder) {
-               fd_batch_flush(ctx->batch, true, false);
+               fd_batch_flush(batch, true, false);
        } else if (flags & PIPE_FLUSH_DEFERRED) {
                fd_bc_flush_deferred(&ctx->screen->batch_cache, ctx);
        } else {
@@ -140,6 +142,7 @@ fd_context_destroy(struct pipe_context *pctx)
        if (ctx->screen->reorder && util_queue_is_initialized(&ctx->flush_queue))
                util_queue_destroy(&ctx->flush_queue);
 
+       util_copy_framebuffer_state(&ctx->framebuffer, NULL);
        fd_batch_reference(&ctx->batch, NULL);  /* unref current batch */
        fd_bc_invalidate_context(ctx);
 
@@ -312,7 +315,8 @@ fd_context_init(struct fd_context *ctx, struct pipe_screen *pscreen,
                goto fail;
        pctx->const_uploader = pctx->stream_uploader;
 
-       ctx->batch = fd_bc_alloc_batch(&screen->batch_cache, ctx);
+       if (!ctx->screen->reorder)
+               ctx->batch = fd_bc_alloc_batch(&screen->batch_cache, ctx);
 
        slab_create_child(&ctx->transfer_pool, &screen->transfer_pool);
 
index c8eb4f8c01018e2d856d5f927ff972711c0ca99c..58fba99874a50c624a9c3bea768db7a57f5e58bd 100644 (file)
@@ -281,6 +281,8 @@ struct fd_context {
        struct pipe_blend_color blend_color;
        struct pipe_stencil_ref stencil_ref;
        unsigned sample_mask;
+       /* local context fb state, for when ctx->batch is null: */
+       struct pipe_framebuffer_state framebuffer;
        struct pipe_poly_stipple stipple;
        struct pipe_viewport_state viewport;
        struct fd_constbuf_stateobj constbuf[PIPE_SHADER_TYPES];
@@ -431,6 +433,13 @@ fd_supported_prim(struct fd_context *ctx, unsigned prim)
 static inline struct fd_batch *
 fd_context_batch(struct fd_context *ctx)
 {
+       if (unlikely(!ctx->batch)) {
+               struct fd_batch *batch =
+                       fd_batch_from_fb(&ctx->screen->batch_cache, ctx, &ctx->framebuffer);
+               util_copy_framebuffer_state(&batch->framebuffer, &ctx->framebuffer);
+               ctx->batch = batch;
+               fd_context_all_dirty(ctx);
+       }
        return ctx->batch;
 }
 
index 88f6fb557d0ecc0f8a47f726f45fbfc65c2d63c8..3ce8b6ed9fc144d9de131bf3f4311e777b4720d3 100644 (file)
@@ -212,17 +212,14 @@ fd_set_framebuffer_state(struct pipe_context *pctx,
        struct pipe_framebuffer_state *cso;
 
        if (ctx->screen->reorder) {
-               struct fd_batch *batch, *old_batch = NULL;
+               struct fd_batch *old_batch = NULL;
 
                fd_batch_reference(&old_batch, ctx->batch);
 
                if (likely(old_batch))
                        fd_batch_set_stage(old_batch, FD_STAGE_NULL);
 
-               batch = fd_batch_from_fb(&ctx->screen->batch_cache, ctx, framebuffer);
                fd_batch_reference(&ctx->batch, NULL);
-               fd_reset_wfi(batch);
-               ctx->batch = batch;
                fd_context_all_dirty(ctx);
 
                if (old_batch && old_batch->blit && !old_batch->back_blit) {
@@ -241,7 +238,7 @@ fd_set_framebuffer_state(struct pipe_context *pctx,
                fd_batch_flush(ctx->batch, false, false);
        }
 
-       cso = &ctx->batch->framebuffer;
+       cso = &ctx->framebuffer;
 
        util_copy_framebuffer_state(cso, framebuffer);