/*
 * Copyright 2014, 2015 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#include "pipe/p_shader_tokens.h"

#include "pipe/p_context.h"
#include "pipe/p_defines.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"
#include "util/u_format.h"
#include "util/u_transfer.h"
#include "util/u_helpers.h"
#include "util/u_slab.h"
#include "util/u_upload_mgr.h"
#include "util/u_blitter.h"
#include "tgsi/tgsi_text.h"

#include "pipebuffer/pb_buffer.h"
#include "state_tracker/graw.h"
#include "state_tracker/drm_driver.h"

#include "virgl_encode.h"

#include "virgl_context.h"

#include "virgl_resource.h"
#include "virgl_screen.h"
#include "state_tracker/sw_winsys.h"

struct pipe_screen encscreen;

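/*
 * Handles are allocated guest-side from a simple monotonically increasing
 * counter and are what the encoder uses to identify objects to the host.
 */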
static uint32_t next_handle;
uint32_t virgl_object_assign_handle(void)
{
   return ++next_handle;
}

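/* Upload the valid (dirty) range of a guest buffer to its host resource. */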
static void virgl_buffer_flush(struct virgl_context *vctx,
                               struct virgl_buffer *vbuf)
{
   struct virgl_screen *rs = virgl_screen(vctx->base.screen);
   struct pipe_box box;

   assert(vbuf->on_list);

   box.height = 1;
   box.depth = 1;
   box.y = 0;
   box.z = 0;

   box.x = vbuf->valid_buffer_range.start;
   box.width = MIN2(vbuf->valid_buffer_range.end - vbuf->valid_buffer_range.start,
                    vbuf->base.u.b.width0);

   vctx->num_transfers++;
   rs->vws->transfer_put(rs->vws, vbuf->base.hw_res,
                         &box, 0, 0, box.x, 0);

   util_range_set_empty(&vbuf->valid_buffer_range);
}

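/*
 * The virgl_attach_res_* helpers re-emit references for every currently
 * bound resource, keeping them attached to the command buffer being built.
 */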
static void virgl_attach_res_framebuffer(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct pipe_surface *surf;
   struct virgl_resource *res;
   unsigned i;

   surf = vctx->framebuffer.zsbuf;
   if (surf) {
      res = virgl_resource(surf->texture);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
   for (i = 0; i < vctx->framebuffer.nr_cbufs; i++) {
      surf = vctx->framebuffer.cbufs[i];
      if (surf) {
         res = virgl_resource(surf->texture);
         if (res)
            vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
      }
   }
}

static void virgl_attach_res_sampler_views(struct virgl_context *vctx,
                                           unsigned shader_type)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_textures_info *tinfo = &vctx->samplers[shader_type];
   struct virgl_resource *res;
   uint32_t remaining_mask = tinfo->enabled_mask;
   unsigned i;
   while (remaining_mask) {
      i = u_bit_scan(&remaining_mask);
      assert(tinfo->views[i]);

      res = virgl_resource(tinfo->views[i]->base.texture);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
}

static void virgl_attach_res_vertex_buffers(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;
   unsigned i;

   for (i = 0; i < vctx->num_vertex_buffers; i++) {
      res = virgl_resource(vctx->vertex_buffer[i].buffer);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
}

static void virgl_attach_res_index_buffer(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;

   res = virgl_resource(vctx->index_buffer.buffer);
   if (res)
      vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
}

static void virgl_attach_res_so_targets(struct virgl_context *vctx)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;
   unsigned i;

   for (i = 0; i < vctx->num_so_targets; i++) {
      res = virgl_resource(vctx->so_targets[i].base.buffer);
      if (res)
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
   }
}

static void virgl_attach_res_uniform_buffers(struct virgl_context *vctx,
                                             unsigned shader_type)
{
   struct virgl_winsys *vws = virgl_screen(vctx->base.screen)->vws;
   struct virgl_resource *res;
   unsigned i;
   for (i = 0; i < PIPE_MAX_CONSTANT_BUFFERS; i++) {
      res = virgl_resource(vctx->ubos[shader_type][i]);
      if (res) {
         vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);
      }
   }
}

/*
 * After flushing, the hw context still has a bunch of
 * resources bound, so we need to rebind those here.
 */
static void virgl_reemit_res(struct virgl_context *vctx)
{
   unsigned shader_type;

   /* reattach any flushed resources */
   /* framebuffer, sampler views, vertex/index/uniform/stream buffers */
   virgl_attach_res_framebuffer(vctx);

   for (shader_type = 0; shader_type < PIPE_SHADER_TYPES; shader_type++) {
      virgl_attach_res_sampler_views(vctx, shader_type);
      virgl_attach_res_uniform_buffers(vctx, shader_type);
   }
   virgl_attach_res_index_buffer(vctx);
   virgl_attach_res_vertex_buffers(vctx);
   virgl_attach_res_so_targets(vctx);
}

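/*
 * Surfaces are host-side objects: the guest fills in the pipe_surface
 * template and encodes a create command carrying the assigned handle.
 */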
static struct pipe_surface *virgl_create_surface(struct pipe_context *ctx,
                                                 struct pipe_resource *resource,
                                                 const struct pipe_surface *templ)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_surface *surf;
   struct virgl_resource *res = virgl_resource(resource);
   uint32_t handle;

   surf = CALLOC_STRUCT(virgl_surface);
   if (surf == NULL)
      return NULL;

   res->clean = FALSE;
   handle = virgl_object_assign_handle();
   pipe_reference_init(&surf->base.reference, 1);
   pipe_resource_reference(&surf->base.texture, resource);
   surf->base.context = ctx;
   surf->base.format = templ->format;
   if (resource->target != PIPE_BUFFER) {
      surf->base.width = u_minify(resource->width0, templ->u.tex.level);
      surf->base.height = u_minify(resource->height0, templ->u.tex.level);
      surf->base.u.tex.level = templ->u.tex.level;
      surf->base.u.tex.first_layer = templ->u.tex.first_layer;
      surf->base.u.tex.last_layer = templ->u.tex.last_layer;
   } else {
      surf->base.width = templ->u.buf.last_element - templ->u.buf.first_element + 1;
      surf->base.height = resource->height0;
      surf->base.u.buf.first_element = templ->u.buf.first_element;
      surf->base.u.buf.last_element = templ->u.buf.last_element;
   }
   virgl_encoder_create_surface(vctx, handle, res, &surf->base);
   surf->handle = handle;
   return &surf->base;
}

static void virgl_surface_destroy(struct pipe_context *ctx,
                                  struct pipe_surface *psurf)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_surface *surf = virgl_surface(psurf);

   pipe_resource_reference(&surf->base.texture, NULL);
   virgl_encode_delete_object(vctx, surf->handle, VIRGL_OBJECT_SURFACE);
   FREE(surf);
}

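/*
 * CSO objects (blend, DSA, rasterizer, vertex elements, samplers, shaders)
 * live on the host; the "object" handed back to the state tracker is just
 * the 32-bit handle cast to a pointer.
 */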
static void *virgl_create_blend_state(struct pipe_context *ctx,
                                      const struct pipe_blend_state *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   handle = virgl_object_assign_handle();

   virgl_encode_blend_state(vctx, handle, blend_state);
   return (void *)(unsigned long)handle;
}

static void virgl_bind_blend_state(struct pipe_context *ctx,
                                   void *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)blend_state;
   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_BLEND);
}

static void virgl_delete_blend_state(struct pipe_context *ctx,
                                     void *blend_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)blend_state;
   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_BLEND);
}

static void *virgl_create_depth_stencil_alpha_state(struct pipe_context *ctx,
                                                    const struct pipe_depth_stencil_alpha_state *dsa_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   handle = virgl_object_assign_handle();

   virgl_encode_dsa_state(vctx, handle, dsa_state);
   return (void *)(unsigned long)handle;
}

static void virgl_bind_depth_stencil_alpha_state(struct pipe_context *ctx,
                                                 void *dsa_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)dsa_state;
   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_DSA);
}

static void virgl_delete_depth_stencil_alpha_state(struct pipe_context *ctx,
                                                   void *dsa_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)dsa_state;
   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_DSA);
}

static void *virgl_create_rasterizer_state(struct pipe_context *ctx,
                                           const struct pipe_rasterizer_state *rs_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   handle = virgl_object_assign_handle();

   virgl_encode_rasterizer_state(vctx, handle, rs_state);
   return (void *)(unsigned long)handle;
}

static void virgl_bind_rasterizer_state(struct pipe_context *ctx,
                                        void *rs_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)rs_state;

   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_RASTERIZER);
}

static void virgl_delete_rasterizer_state(struct pipe_context *ctx,
                                          void *rs_state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)rs_state;
   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_RASTERIZER);
}

static void virgl_set_framebuffer_state(struct pipe_context *ctx,
                                        const struct pipe_framebuffer_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);

   vctx->framebuffer = *state;
   virgl_encoder_set_framebuffer_state(vctx, state);
   virgl_attach_res_framebuffer(vctx);
}

static void virgl_set_viewport_states(struct pipe_context *ctx,
                                      unsigned start_slot,
                                      unsigned num_viewports,
                                      const struct pipe_viewport_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_viewport_states(vctx, start_slot, num_viewports, state);
}

static void *virgl_create_vertex_elements_state(struct pipe_context *ctx,
                                                unsigned num_elements,
                                                const struct pipe_vertex_element *elements)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = virgl_object_assign_handle();
   virgl_encoder_create_vertex_elements(vctx, handle,
                                        num_elements, elements);
   return (void *)(unsigned long)handle;
}

static void virgl_delete_vertex_elements_state(struct pipe_context *ctx,
                                               void *ve)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)ve;

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_VERTEX_ELEMENTS);
}

static void virgl_bind_vertex_elements_state(struct pipe_context *ctx,
                                             void *ve)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)ve;
   virgl_encode_bind_object(vctx, handle, VIRGL_OBJECT_VERTEX_ELEMENTS);
}

static void virgl_set_vertex_buffers(struct pipe_context *ctx,
                                     unsigned start_slot,
                                     unsigned num_buffers,
                                     const struct pipe_vertex_buffer *buffers)
{
   struct virgl_context *vctx = virgl_context(ctx);

   util_set_vertex_buffers_count(vctx->vertex_buffer,
                                 &vctx->num_vertex_buffers,
                                 buffers, start_slot, num_buffers);

   vctx->vertex_array_dirty = TRUE;
}

static void virgl_hw_set_vertex_buffers(struct pipe_context *ctx)
{
   struct virgl_context *vctx = virgl_context(ctx);

   if (vctx->vertex_array_dirty) {
      virgl_encoder_set_vertex_buffers(vctx, vctx->num_vertex_buffers, vctx->vertex_buffer);
      virgl_attach_res_vertex_buffers(vctx);
   }
}

static void virgl_set_stencil_ref(struct pipe_context *ctx,
                                  const struct pipe_stencil_ref *ref)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_stencil_ref(vctx, ref);
}

static void virgl_set_blend_color(struct pipe_context *ctx,
                                  const struct pipe_blend_color *color)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_blend_color(vctx, color);
}

static void virgl_set_index_buffer(struct pipe_context *ctx,
                                   const struct pipe_index_buffer *ib)
{
   struct virgl_context *vctx = virgl_context(ctx);

   if (ib) {
      pipe_resource_reference(&vctx->index_buffer.buffer, ib->buffer);
      memcpy(&vctx->index_buffer, ib, sizeof(*ib));
   } else {
      pipe_resource_reference(&vctx->index_buffer.buffer, NULL);
   }
}

static void virgl_hw_set_index_buffer(struct pipe_context *ctx,
                                      struct pipe_index_buffer *ib)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_index_buffer(vctx, ib);
   virgl_attach_res_index_buffer(vctx);
}

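/*
 * Constant buffers backed by a real resource are bound by reference;
 * user-memory constant buffers are written inline into the command stream.
 */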
static void virgl_set_constant_buffer(struct pipe_context *ctx,
                                      uint shader, uint index,
                                      struct pipe_constant_buffer *buf)
{
   struct virgl_context *vctx = virgl_context(ctx);

   if (buf) {
      if (!buf->user_buffer) {
         struct virgl_resource *res = virgl_resource(buf->buffer);
         virgl_encoder_set_uniform_buffer(vctx, shader, index, buf->buffer_offset,
                                          buf->buffer_size, res);
         pipe_resource_reference(&vctx->ubos[shader][index], buf->buffer);
         return;
      }
      pipe_resource_reference(&vctx->ubos[shader][index], NULL);
      virgl_encoder_write_constant_buffer(vctx, shader, index, buf->buffer_size / 4, buf->user_buffer);
   } else {
      virgl_encoder_write_constant_buffer(vctx, shader, index, 0, NULL);
      pipe_resource_reference(&vctx->ubos[shader][index], NULL);
   }
}

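/*
 * Write data directly into the command stream. If the destination resource
 * is still referenced by queued commands or busy on the host, flush and
 * wait before encoding the write.
 */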
void virgl_transfer_inline_write(struct pipe_context *ctx,
                                 struct pipe_resource *res,
                                 unsigned level,
                                 unsigned usage,
                                 const struct pipe_box *box,
                                 const void *data,
                                 unsigned stride,
                                 unsigned layer_stride)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *vs = virgl_screen(ctx->screen);
   struct virgl_resource *grres = virgl_resource(res);
   struct virgl_buffer *vbuf = virgl_buffer(res);

   grres->clean = FALSE;

   if (virgl_res_needs_flush_wait(vctx, &vbuf->base, usage)) {
      ctx->flush(ctx, NULL, 0);

      vs->vws->resource_wait(vs->vws, vbuf->base.hw_res);
   }

   virgl_encoder_inline_write(vctx, grres, level, usage,
                              box, data, stride, layer_stride);
}

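/*
 * Common VS/GS/FS creation path: run the TGSI through the virgl transform
 * pass, then encode the resulting tokens as a host shader object.
 */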
static void *virgl_shader_encoder(struct pipe_context *ctx,
                                  const struct pipe_shader_state *shader,
                                  unsigned type)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;
   struct tgsi_token *new_tokens;
   int ret;

   new_tokens = virgl_tgsi_transform(shader->tokens);
   if (!new_tokens)
      return NULL;

   handle = virgl_object_assign_handle();
   /* encode shader state */
   ret = virgl_encode_shader_state(vctx, handle, type,
                                   &shader->stream_output,
                                   new_tokens);
   if (ret) {
      FREE(new_tokens);
      return NULL;
   }

   FREE(new_tokens);
   return (void *)(unsigned long)handle;
}

static void *virgl_create_vs_state(struct pipe_context *ctx,
                                   const struct pipe_shader_state *shader)
{
   return virgl_shader_encoder(ctx, shader, PIPE_SHADER_VERTEX);
}

static void *virgl_create_gs_state(struct pipe_context *ctx,
                                   const struct pipe_shader_state *shader)
{
   return virgl_shader_encoder(ctx, shader, PIPE_SHADER_GEOMETRY);
}

static void *virgl_create_fs_state(struct pipe_context *ctx,
                                   const struct pipe_shader_state *shader)
{
   return virgl_shader_encoder(ctx, shader, PIPE_SHADER_FRAGMENT);
}

static void
virgl_delete_fs_state(struct pipe_context *ctx,
                      void *fs)
{
   uint32_t handle = (unsigned long)fs;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SHADER);
}

static void
virgl_delete_gs_state(struct pipe_context *ctx,
                      void *gs)
{
   uint32_t handle = (unsigned long)gs;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SHADER);
}

static void
virgl_delete_vs_state(struct pipe_context *ctx,
                      void *vs)
{
   uint32_t handle = (unsigned long)vs;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SHADER);
}

static void virgl_bind_vs_state(struct pipe_context *ctx,
                                void *vss)
{
   uint32_t handle = (unsigned long)vss;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_bind_shader(vctx, handle, PIPE_SHADER_VERTEX);
}

static void virgl_bind_gs_state(struct pipe_context *ctx,
                                void *gss)
{
   uint32_t handle = (unsigned long)gss;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_bind_shader(vctx, handle, PIPE_SHADER_GEOMETRY);
}

static void virgl_bind_fs_state(struct pipe_context *ctx,
                                void *fss)
{
   uint32_t handle = (unsigned long)fss;
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_bind_shader(vctx, handle, PIPE_SHADER_FRAGMENT);
}

static void virgl_clear(struct pipe_context *ctx,
                        unsigned buffers,
                        const union pipe_color_union *color,
                        double depth, unsigned stencil)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_clear(vctx, buffers, color, depth, stencil);
}

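/*
 * Draw entry point: primitives the host cannot render are routed through
 * u_primconvert, and user index buffers are uploaded with u_upload before
 * the draw command is encoded.
 */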
static void virgl_draw_vbo(struct pipe_context *ctx,
                           const struct pipe_draw_info *dinfo)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *rs = virgl_screen(ctx->screen);
   struct pipe_index_buffer ib = {};
   struct pipe_draw_info info = *dinfo;

   if (!(rs->caps.caps.v1.prim_mask & (1 << dinfo->mode))) {
      util_primconvert_save_index_buffer(vctx->primconvert, &vctx->index_buffer);
      util_primconvert_draw_vbo(vctx->primconvert, dinfo);
      return;
   }
   if (info.indexed) {
      pipe_resource_reference(&ib.buffer, vctx->index_buffer.buffer);
      ib.user_buffer = vctx->index_buffer.user_buffer;
      ib.index_size = vctx->index_buffer.index_size;
      ib.offset = vctx->index_buffer.offset + info.start * ib.index_size;

      if (ib.user_buffer) {
         u_upload_data(vctx->uploader, 0, info.count * ib.index_size,
                       ib.user_buffer, &ib.offset, &ib.buffer);
         ib.user_buffer = NULL;
      }
   }

   u_upload_unmap(vctx->uploader);

   vctx->num_draws++;
   virgl_hw_set_vertex_buffers(ctx);
   if (info.indexed)
      virgl_hw_set_index_buffer(ctx, &ib);

   virgl_encoder_draw_vbo(vctx, &info);

   pipe_resource_reference(&ib.buffer, NULL);
}

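/*
 * Submit the command buffer to the host, then re-select our sub-context and
 * re-reference the resources that are still bound.
 */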
static void virgl_flush_eq(struct virgl_context *ctx, void *closure)
{
   struct virgl_screen *rs = virgl_screen(ctx->base.screen);

   /* send the buffer to the remote side for decoding */
   ctx->num_transfers = ctx->num_draws = 0;
   rs->vws->submit_cmd(rs->vws, ctx->cbuf);

   virgl_encoder_set_sub_ctx(ctx, ctx->hw_sub_ctx_id);

   /* add back current framebuffer resources to reference list? */
   virgl_reemit_res(ctx);
}

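/*
 * Flush requested by the state tracker: create the fence if asked for, push
 * out any dirty buffers, then submit the command buffer.
 */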
static void virgl_flush_from_st(struct pipe_context *ctx,
                                struct pipe_fence_handle **fence,
                                enum pipe_flush_flags flags)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *rs = virgl_screen(ctx->screen);
   struct virgl_buffer *buf, *tmp;

   if (fence)
      *fence = rs->vws->cs_create_fence(rs->vws);

   LIST_FOR_EACH_ENTRY_SAFE(buf, tmp, &vctx->to_flush_bufs, flush_list) {
      struct pipe_resource *res = &buf->base.u.b;
      virgl_buffer_flush(vctx, buf);
      list_del(&buf->flush_list);
      buf->on_list = FALSE;
      pipe_resource_reference(&res, NULL);
   }
   virgl_flush_eq(vctx, vctx);
}

static struct pipe_sampler_view *virgl_create_sampler_view(struct pipe_context *ctx,
                                                           struct pipe_resource *texture,
                                                           const struct pipe_sampler_view *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_sampler_view *grview;
   uint32_t handle;
   struct virgl_resource *res;

   if (state == NULL)
      return NULL;

   grview = CALLOC_STRUCT(virgl_sampler_view);
   if (grview == NULL)
      return NULL;

   res = virgl_resource(texture);
   handle = virgl_object_assign_handle();
   virgl_encode_sampler_view(vctx, handle, res, state);

   grview->base = *state;
   grview->base.reference.count = 1;

   grview->base.texture = NULL;
   grview->base.context = ctx;
   pipe_resource_reference(&grview->base.texture, texture);
   grview->handle = handle;
   return &grview->base;
}

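/*
 * Sampler view slots are tracked in enabled_mask so the bound textures can
 * be re-referenced after every flush (see virgl_attach_res_sampler_views).
 */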
static void virgl_set_sampler_views(struct pipe_context *ctx,
                                    unsigned shader_type,
                                    unsigned start_slot,
                                    unsigned num_views,
                                    struct pipe_sampler_view **views)
{
   struct virgl_context *vctx = virgl_context(ctx);
   int i;
   uint32_t disable_mask = ~((1ull << num_views) - 1);
   struct virgl_textures_info *tinfo = &vctx->samplers[shader_type];
   uint32_t new_mask = 0;
   uint32_t remaining_mask;

   remaining_mask = tinfo->enabled_mask & disable_mask;

   while (remaining_mask) {
      i = u_bit_scan(&remaining_mask);
      assert(tinfo->views[i]);

      pipe_sampler_view_reference((struct pipe_sampler_view **)&tinfo->views[i], NULL);
   }

   for (i = 0; i < num_views; i++) {
      struct virgl_sampler_view *grview = virgl_sampler_view(views[i]);

      if (views[i] == (struct pipe_sampler_view *)tinfo->views[i])
         continue;

      if (grview) {
         new_mask |= 1 << i;
         pipe_sampler_view_reference((struct pipe_sampler_view **)&tinfo->views[i], views[i]);
      } else {
         pipe_sampler_view_reference((struct pipe_sampler_view **)&tinfo->views[i], NULL);
         disable_mask |= 1 << i;
      }
   }

   tinfo->enabled_mask &= ~disable_mask;
   tinfo->enabled_mask |= new_mask;
   virgl_encode_set_sampler_views(vctx, shader_type, start_slot, num_views, tinfo->views);
   virgl_attach_res_sampler_views(vctx, shader_type);
}

static void virgl_destroy_sampler_view(struct pipe_context *ctx,
                                       struct pipe_sampler_view *view)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_sampler_view *grview = virgl_sampler_view(view);

   virgl_encode_delete_object(vctx, grview->handle, VIRGL_OBJECT_SAMPLER_VIEW);
   pipe_resource_reference(&view->texture, NULL);
   FREE(view);
}

static void *virgl_create_sampler_state(struct pipe_context *ctx,
                                        const struct pipe_sampler_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle;

   handle = virgl_object_assign_handle();

   virgl_encode_sampler_state(vctx, handle, state);
   return (void *)(unsigned long)handle;
}

static void virgl_delete_sampler_state(struct pipe_context *ctx,
                                       void *ss)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handle = (unsigned long)ss;

   virgl_encode_delete_object(vctx, handle, VIRGL_OBJECT_SAMPLER_STATE);
}

static void virgl_bind_sampler_states(struct pipe_context *ctx,
                                      unsigned shader, unsigned start_slot,
                                      unsigned num_samplers,
                                      void **samplers)
{
   struct virgl_context *vctx = virgl_context(ctx);
   uint32_t handles[32];
   int i;
   for (i = 0; i < num_samplers; i++) {
      handles[i] = (unsigned long)(samplers[i]);
   }
   virgl_encode_bind_sampler_states(vctx, shader, start_slot, num_samplers, handles);
}

static void virgl_set_polygon_stipple(struct pipe_context *ctx,
                                      const struct pipe_poly_stipple *ps)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_polygon_stipple(vctx, ps);
}

static void virgl_set_scissor_states(struct pipe_context *ctx,
                                     unsigned start_slot,
                                     unsigned num_scissor,
                                     const struct pipe_scissor_state *ss)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_scissor_state(vctx, start_slot, num_scissor, ss);
}

static void virgl_set_sample_mask(struct pipe_context *ctx,
                                  unsigned sample_mask)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_sample_mask(vctx, sample_mask);
}

static void virgl_set_clip_state(struct pipe_context *ctx,
                                 const struct pipe_clip_state *clip)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_clip_state(vctx, clip);
}

static void virgl_resource_copy_region(struct pipe_context *ctx,
                                       struct pipe_resource *dst,
                                       unsigned dst_level,
                                       unsigned dstx, unsigned dsty, unsigned dstz,
                                       struct pipe_resource *src,
                                       unsigned src_level,
                                       const struct pipe_box *src_box)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_resource *dres = virgl_resource(dst);
   struct virgl_resource *sres = virgl_resource(src);

   dres->clean = FALSE;
   virgl_encode_resource_copy_region(vctx, dres,
                                     dst_level, dstx, dsty, dstz,
                                     sres, src_level,
                                     src_box);
}

static void
virgl_flush_resource(struct pipe_context *pipe,
                     struct pipe_resource *resource)
{
}

static void virgl_blit(struct pipe_context *ctx,
                       const struct pipe_blit_info *blit)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_resource *dres = virgl_resource(blit->dst.resource);
   struct virgl_resource *sres = virgl_resource(blit->src.resource);

   dres->clean = FALSE;
   virgl_encode_blit(vctx, dres, sres,
                     blit);
}

static void
virgl_context_destroy(struct pipe_context *ctx)
{
   struct virgl_context *vctx = virgl_context(ctx);
   struct virgl_screen *rs = virgl_screen(ctx->screen);

   vctx->framebuffer.zsbuf = NULL;
   vctx->framebuffer.nr_cbufs = 0;
   virgl_encoder_destroy_sub_ctx(vctx, vctx->hw_sub_ctx_id);
   virgl_flush_eq(vctx, vctx);

   rs->vws->cmd_buf_destroy(vctx->cbuf);
   if (vctx->uploader)
      u_upload_destroy(vctx->uploader);
   util_primconvert_destroy(vctx->primconvert);

   util_slab_destroy(&vctx->texture_transfer_pool);
   FREE(vctx);
}

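/*
 * Create a context: allocate a command buffer, wire up the pipe_context
 * entry points, and claim a host sub-context id for this context's state.
 */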
struct pipe_context *virgl_context_create(struct pipe_screen *pscreen,
                                          void *priv,
                                          unsigned flags)
{
   struct virgl_context *vctx;
   struct virgl_screen *rs = virgl_screen(pscreen);
   vctx = CALLOC_STRUCT(virgl_context);
   if (!vctx)
      return NULL;

   vctx->cbuf = rs->vws->cmd_buf_create(rs->vws);
   if (!vctx->cbuf) {
      FREE(vctx);
      return NULL;
   }

   vctx->base.destroy = virgl_context_destroy;
   vctx->base.create_surface = virgl_create_surface;
   vctx->base.surface_destroy = virgl_surface_destroy;
   vctx->base.set_framebuffer_state = virgl_set_framebuffer_state;
   vctx->base.create_blend_state = virgl_create_blend_state;
   vctx->base.bind_blend_state = virgl_bind_blend_state;
   vctx->base.delete_blend_state = virgl_delete_blend_state;
   vctx->base.create_depth_stencil_alpha_state = virgl_create_depth_stencil_alpha_state;
   vctx->base.bind_depth_stencil_alpha_state = virgl_bind_depth_stencil_alpha_state;
   vctx->base.delete_depth_stencil_alpha_state = virgl_delete_depth_stencil_alpha_state;
   vctx->base.create_rasterizer_state = virgl_create_rasterizer_state;
   vctx->base.bind_rasterizer_state = virgl_bind_rasterizer_state;
   vctx->base.delete_rasterizer_state = virgl_delete_rasterizer_state;

   vctx->base.set_viewport_states = virgl_set_viewport_states;
   vctx->base.create_vertex_elements_state = virgl_create_vertex_elements_state;
   vctx->base.bind_vertex_elements_state = virgl_bind_vertex_elements_state;
   vctx->base.delete_vertex_elements_state = virgl_delete_vertex_elements_state;
   vctx->base.set_vertex_buffers = virgl_set_vertex_buffers;
   vctx->base.set_index_buffer = virgl_set_index_buffer;
   vctx->base.set_constant_buffer = virgl_set_constant_buffer;
   vctx->base.transfer_inline_write = virgl_transfer_inline_write;

   vctx->base.create_vs_state = virgl_create_vs_state;
   vctx->base.create_gs_state = virgl_create_gs_state;
   vctx->base.create_fs_state = virgl_create_fs_state;

   vctx->base.bind_vs_state = virgl_bind_vs_state;
   vctx->base.bind_gs_state = virgl_bind_gs_state;
   vctx->base.bind_fs_state = virgl_bind_fs_state;

   vctx->base.delete_vs_state = virgl_delete_vs_state;
   vctx->base.delete_gs_state = virgl_delete_gs_state;
   vctx->base.delete_fs_state = virgl_delete_fs_state;

   vctx->base.clear = virgl_clear;
   vctx->base.draw_vbo = virgl_draw_vbo;
   vctx->base.flush = virgl_flush_from_st;
   vctx->base.screen = pscreen;
   vctx->base.create_sampler_view = virgl_create_sampler_view;
   vctx->base.sampler_view_destroy = virgl_destroy_sampler_view;
   vctx->base.set_sampler_views = virgl_set_sampler_views;

   vctx->base.create_sampler_state = virgl_create_sampler_state;
   vctx->base.delete_sampler_state = virgl_delete_sampler_state;
   vctx->base.bind_sampler_states = virgl_bind_sampler_states;

   vctx->base.set_polygon_stipple = virgl_set_polygon_stipple;
   vctx->base.set_scissor_states = virgl_set_scissor_states;
   vctx->base.set_sample_mask = virgl_set_sample_mask;
   vctx->base.set_stencil_ref = virgl_set_stencil_ref;
   vctx->base.set_clip_state = virgl_set_clip_state;

   vctx->base.set_blend_color = virgl_set_blend_color;

   vctx->base.resource_copy_region = virgl_resource_copy_region;
   vctx->base.flush_resource = virgl_flush_resource;
   vctx->base.blit = virgl_blit;

   virgl_init_context_resource_functions(&vctx->base);
   virgl_init_query_functions(vctx);
   virgl_init_so_functions(vctx);

   list_inithead(&vctx->to_flush_bufs);
   util_slab_create(&vctx->texture_transfer_pool, sizeof(struct virgl_transfer),
                    16, UTIL_SLAB_SINGLETHREADED);

   vctx->primconvert = util_primconvert_create(&vctx->base, rs->caps.caps.v1.prim_mask);
   vctx->uploader = u_upload_create(&vctx->base, 1024 * 1024, 256,
                                    PIPE_BIND_INDEX_BUFFER);
   if (!vctx->uploader)
      goto fail;

   vctx->hw_sub_ctx_id = rs->sub_ctx_id++;
   virgl_encoder_create_sub_ctx(vctx, vctx->hw_sub_ctx_id);

   virgl_encoder_set_sub_ctx(vctx, vctx->hw_sub_ctx_id);
   return &vctx->base;
fail:
   return NULL;
}