return true;
}
+/* Invalidate underlying buffer storage, reset fences, reallocate to non-busy
+ * buffer.
+ */
+void
+nouveau_buffer_invalidate(struct pipe_context *pipe,
+                          struct pipe_resource *resource)
+{
+   struct nouveau_context *nv = nouveau_context(pipe);
+   struct nv04_resource *buf = nv04_resource(resource);
+   int ref = buf->base.reference.count - 1;
+
+   /* Shared buffers shouldn't get reallocated */
+   if (unlikely(buf->base.bind & PIPE_BIND_SHARED))
+      return;
+
+   /* We can't touch persistent/coherent buffers */
+   if (buf->base.flags & (PIPE_RESOURCE_FLAG_MAP_PERSISTENT |
+                          PIPE_RESOURCE_FLAG_MAP_COHERENT))
+      return;
+
+   /* If the buffer is sub-allocated and not currently being written, just
+    * wipe the valid buffer range. Otherwise we have to create fresh
+    * storage. (We don't keep track of fences for non-sub-allocated BOs.)
+    */
+   if (buf->mm && !nouveau_buffer_busy(buf, PIPE_TRANSFER_WRITE)) {
+      util_range_set_empty(&buf->valid_buffer_range);
+   } else {
+      nouveau_buffer_reallocate(nv->screen, buf, buf->domain);
+      if (ref > 0) /* any references inside context possible ? */
+         nv->invalidate_resource_storage(nv, &buf->base, ref);
+   }
+}
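+
+/* Illustrative sketch of the expected caller (an assumption, not part of this
+ * change): the gallium frontend reaches this through the context hook
+ * installed below, typically when an application orphans a buffer, e.g.
+ * glBufferData(..., NULL) or glInvalidateBufferData, and only once the screen
+ * reports PIPE_CAP_INVALIDATE_BUFFER:
+ *
+ *    if (pipe->invalidate_resource)
+ *       pipe->invalidate_resource(pipe, resource);
+ */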
+
/* Scratch data allocation. */
nouveau_user_buffer_upload(struct nouveau_context *, struct nv04_resource *,
                           unsigned base, unsigned size);
+void
+nouveau_buffer_invalidate(struct pipe_context *pipe,
+                          struct pipe_resource *resource);
+
/* Copy data to a scratch buffer and return address & bo the data resides in.
 * Returns 0 on failure.
 */
   FREE(s);
}
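+/* pipe_context::invalidate_resource entry point shared by nv50 and nvc0.
+ * Only buffer resources are forwarded; invalidating any other resource type
+ * is currently a no-op.
+ */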
+void
+nv50_invalidate_resource(struct pipe_context *pipe, struct pipe_resource *res)
+{
+   if (res->target == PIPE_BUFFER)
+      nouveau_buffer_invalidate(pipe, res);
+}
+
void
nv50_init_resource_functions(struct pipe_context *pcontext)
{
   pcontext->transfer_inline_write = u_transfer_inline_write_vtbl;
   pcontext->create_surface = nv50_surface_create;
   pcontext->surface_destroy = nv50_surface_destroy;
+   pcontext->invalidate_resource = nv50_invalidate_resource;
}
void
void
nv50_surface_destroy(struct pipe_context *, struct pipe_surface *);
+void
+nv50_invalidate_resource(struct pipe_context *, struct pipe_resource *);
+
void
nv50_clear_texture(struct pipe_context *pipe,
                   struct pipe_resource *res,
   case PIPE_CAP_CLEAR_TEXTURE:
   case PIPE_CAP_COMPUTE:
   case PIPE_CAP_TGSI_FS_FACE_IS_INTEGER_SYSVAL:
+   case PIPE_CAP_INVALIDATE_BUFFER:
      return 1;
   case PIPE_CAP_SEAMLESS_CUBE_MAP:
      return 1; /* class_3d >= NVA0_3D_CLASS; */
   case PIPE_CAP_TGSI_PACK_HALF_FLOAT:
   case PIPE_CAP_TGSI_FS_POSITION_IS_SYSVAL:
   case PIPE_CAP_SHADER_BUFFER_OFFSET_ALIGNMENT:
-   case PIPE_CAP_INVALIDATE_BUFFER:
   case PIPE_CAP_GENERATE_MIPMAP:
   case PIPE_CAP_STRING_MARKER:
   case PIPE_CAP_BUFFER_SAMPLER_VIEW_RGBA_ONLY:
   pcontext->transfer_inline_write = u_transfer_inline_write_vtbl;
   pcontext->create_surface = nvc0_surface_create;
   pcontext->surface_destroy = nv50_surface_destroy;
+   pcontext->invalidate_resource = nv50_invalidate_resource;
}
void
   case PIPE_CAP_MULTI_DRAW_INDIRECT_PARAMS:
   case PIPE_CAP_TGSI_FS_FACE_IS_INTEGER_SYSVAL:
   case PIPE_CAP_QUERY_BUFFER_OBJECT:
+   case PIPE_CAP_INVALIDATE_BUFFER:
      return 1;
   case PIPE_CAP_SEAMLESS_CUBE_MAP_PER_TEXTURE:
      return (class_3d >= NVE4_3D_CLASS) ? 1 : 0;
   case PIPE_CAP_RESOURCE_FROM_USER_MEMORY:
   case PIPE_CAP_DEVICE_RESET_STATUS_QUERY:
   case PIPE_CAP_TGSI_FS_POSITION_IS_SYSVAL:
-   case PIPE_CAP_INVALIDATE_BUFFER:
   case PIPE_CAP_GENERATE_MIPMAP:
   case PIPE_CAP_STRING_MARKER:
   case PIPE_CAP_BUFFER_SAMPLER_VIEW_RGBA_ONLY: