2 * Copyright 2014, 2015 Red Hat.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
24 #include "util/u_inlines.h"
25 #include "util/u_memory.h"
26 #include "virgl_context.h"
27 #include "virgl_resource.h"
28 #include "virgl_screen.h"
30 static void virgl_buffer_destroy(struct pipe_screen
*screen
,
31 struct pipe_resource
*buf
)
33 struct virgl_screen
*vs
= virgl_screen(screen
);
34 struct virgl_buffer
*vbuf
= virgl_buffer(buf
);
36 util_range_destroy(&vbuf
->valid_buffer_range
);
37 vs
->vws
->resource_unref(vs
->vws
, vbuf
->base
.hw_res
);
41 static void *virgl_buffer_transfer_map(struct pipe_context
*ctx
,
42 struct pipe_resource
*resource
,
45 const struct pipe_box
*box
,
46 struct pipe_transfer
**transfer
)
48 struct virgl_context
*vctx
= virgl_context(ctx
);
49 struct virgl_screen
*vs
= virgl_screen(ctx
->screen
);
50 struct virgl_buffer
*vbuf
= virgl_buffer(resource
);
51 struct virgl_transfer
*trans
;
55 bool doflushwait
= false;
57 if ((usage
& PIPE_TRANSFER_READ
) && (vbuf
->on_list
== TRUE
))
60 doflushwait
= virgl_res_needs_flush_wait(vctx
, &vbuf
->base
, usage
);
63 ctx
->flush(ctx
, NULL
, 0);
65 trans
= slab_alloc(&vctx
->texture_transfer_pool
);
69 trans
->base
.resource
= resource
;
70 trans
->base
.level
= level
;
71 trans
->base
.usage
= usage
;
72 trans
->base
.box
= *box
;
73 trans
->base
.stride
= 0;
74 trans
->base
.layer_stride
= 0;
78 readback
= virgl_res_needs_readback(vctx
, &vbuf
->base
, usage
);
80 vs
->vws
->transfer_get(vs
->vws
, vbuf
->base
.hw_res
, box
, trans
->base
.stride
, trans
->base
.layer_stride
, offset
, level
);
82 if (!(usage
& PIPE_TRANSFER_UNSYNCHRONIZED
))
85 if (doflushwait
|| readback
)
86 vs
->vws
->resource_wait(vs
->vws
, vbuf
->base
.hw_res
);
88 ptr
= vs
->vws
->resource_map(vs
->vws
, vbuf
->base
.hw_res
);
93 trans
->offset
= offset
;
94 *transfer
= &trans
->base
;
96 return ptr
+ trans
->offset
;
99 static void virgl_buffer_transfer_unmap(struct pipe_context
*ctx
,
100 struct pipe_transfer
*transfer
)
102 struct virgl_context
*vctx
= virgl_context(ctx
);
103 struct virgl_transfer
*trans
= virgl_transfer(transfer
);
104 struct virgl_buffer
*vbuf
= virgl_buffer(transfer
->resource
);
106 if (trans
->base
.usage
& PIPE_TRANSFER_WRITE
) {
107 if (!(transfer
->usage
& PIPE_TRANSFER_FLUSH_EXPLICIT
)) {
108 struct virgl_screen
*vs
= virgl_screen(ctx
->screen
);
109 vbuf
->base
.clean
= FALSE
;
110 vctx
->num_transfers
++;
111 vs
->vws
->transfer_put(vs
->vws
, vbuf
->base
.hw_res
,
112 &transfer
->box
, trans
->base
.stride
, trans
->base
.layer_stride
, trans
->offset
, transfer
->level
);
117 slab_free(&vctx
->texture_transfer_pool
, trans
);
120 static void virgl_buffer_transfer_flush_region(struct pipe_context
*ctx
,
121 struct pipe_transfer
*transfer
,
122 const struct pipe_box
*box
)
124 struct virgl_context
*vctx
= virgl_context(ctx
);
125 struct virgl_buffer
*vbuf
= virgl_buffer(transfer
->resource
);
127 if (!vbuf
->on_list
) {
128 struct pipe_resource
*res
= NULL
;
130 list_addtail(&vbuf
->flush_list
, &vctx
->to_flush_bufs
);
131 vbuf
->on_list
= TRUE
;
132 pipe_resource_reference(&res
, &vbuf
->base
.u
.b
);
135 util_range_add(&vbuf
->valid_buffer_range
, transfer
->box
.x
+ box
->x
,
136 transfer
->box
.x
+ box
->x
+ box
->width
);
138 vbuf
->base
.clean
= FALSE
;
141 static const struct u_resource_vtbl virgl_buffer_vtbl
=
143 u_default_resource_get_handle
, /* get_handle */
144 virgl_buffer_destroy
, /* resource_destroy */
145 virgl_buffer_transfer_map
, /* transfer_map */
146 virgl_buffer_transfer_flush_region
, /* transfer_flush_region */
147 virgl_buffer_transfer_unmap
, /* transfer_unmap */
150 struct pipe_resource
*virgl_buffer_create(struct virgl_screen
*vs
,
151 const struct pipe_resource
*template)
153 struct virgl_buffer
*buf
;
156 buf
= CALLOC_STRUCT(virgl_buffer
);
157 buf
->base
.clean
= TRUE
;
158 buf
->base
.u
.b
= *template;
159 buf
->base
.u
.b
.screen
= &vs
->base
;
160 buf
->base
.u
.vtbl
= &virgl_buffer_vtbl
;
161 pipe_reference_init(&buf
->base
.u
.b
.reference
, 1);
162 util_range_init(&buf
->valid_buffer_range
);
164 vbind
= pipe_to_virgl_bind(template->bind
);
165 size
= template->width0
;
167 buf
->base
.hw_res
= vs
->vws
->resource_create(vs
->vws
, template->target
, template->format
, vbind
, template->width0
, 1, 1, 1, 0, 0, size
);
169 util_range_set_empty(&buf
->valid_buffer_range
);
170 return &buf
->base
.u
.b
;