2 * Copyright 2014, 2015 Red Hat.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
24 #include "pipe/p_shader_tokens.h"
26 #include "pipe/p_context.h"
27 #include "pipe/p_defines.h"
28 #include "pipe/p_screen.h"
29 #include "pipe/p_state.h"
30 #include "util/u_inlines.h"
31 #include "util/u_memory.h"
32 #include "util/u_format.h"
33 #include "util/u_prim.h"
34 #include "util/u_transfer.h"
35 #include "util/u_helpers.h"
36 #include "util/slab.h"
37 #include "util/u_upload_mgr.h"
38 #include "util/u_blitter.h"
39 #include "tgsi/tgsi_text.h"
40 #include "indices/u_primconvert.h"
42 #include "pipebuffer/pb_buffer.h"
44 #include "virgl_encode.h"
45 #include "virgl_context.h"
46 #include "virgl_protocol.h"
47 #include "virgl_resource.h"
48 #include "virgl_screen.h"
/* Monotonically increasing handle source for all virgl objects.
 * NOTE(review): not thread-safe; assumes single-threaded context creation.
 * NOTE(review): function body was elided in the corrupted source; restored
 * from upstream Mesa virgl — verify against git history. */
static uint32_t next_handle;
uint32_t virgl_object_assign_handle(void)
{
   return ++next_handle;
}
56 static void virgl_buffer_flush(struct virgl_context
*vctx
,
57 struct virgl_buffer
*vbuf
)
59 struct virgl_screen
*rs
= virgl_screen(vctx
->base
.screen
);
62 assert(vbuf
->on_list
);
69 box
.x
= vbuf
->valid_buffer_range
.start
;
70 box
.width
= MIN2(vbuf
->valid_buffer_range
.end
- vbuf
->valid_buffer_range
.start
, vbuf
->base
.u
.b
.width0
);
72 vctx
->num_transfers
++;
73 rs
->vws
->transfer_put(rs
->vws
, vbuf
->base
.hw_res
,
74 &box
, 0, 0, box
.x
, 0);
76 util_range_set_empty(&vbuf
->valid_buffer_range
);
79 static void virgl_attach_res_framebuffer(struct virgl_context
*vctx
)
81 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
82 struct pipe_surface
*surf
;
83 struct virgl_resource
*res
;
86 surf
= vctx
->framebuffer
.zsbuf
;
88 res
= virgl_resource(surf
->texture
);
90 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
92 for (i
= 0; i
< vctx
->framebuffer
.nr_cbufs
; i
++) {
93 surf
= vctx
->framebuffer
.cbufs
[i
];
95 res
= virgl_resource(surf
->texture
);
97 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
102 static void virgl_attach_res_sampler_views(struct virgl_context
*vctx
,
103 enum pipe_shader_type shader_type
)
105 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
106 struct virgl_textures_info
*tinfo
= &vctx
->samplers
[shader_type
];
107 struct virgl_resource
*res
;
108 uint32_t remaining_mask
= tinfo
->enabled_mask
;
110 while (remaining_mask
) {
111 i
= u_bit_scan(&remaining_mask
);
112 assert(tinfo
->views
[i
]);
114 res
= virgl_resource(tinfo
->views
[i
]->base
.texture
);
116 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
120 static void virgl_attach_res_vertex_buffers(struct virgl_context
*vctx
)
122 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
123 struct virgl_resource
*res
;
126 for (i
= 0; i
< vctx
->num_vertex_buffers
; i
++) {
127 res
= virgl_resource(vctx
->vertex_buffer
[i
].buffer
.resource
);
129 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
133 static void virgl_attach_res_index_buffer(struct virgl_context
*vctx
,
134 struct virgl_indexbuf
*ib
)
136 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
137 struct virgl_resource
*res
;
139 res
= virgl_resource(ib
->buffer
);
141 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
144 static void virgl_attach_res_so_targets(struct virgl_context
*vctx
)
146 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
147 struct virgl_resource
*res
;
150 for (i
= 0; i
< vctx
->num_so_targets
; i
++) {
151 res
= virgl_resource(vctx
->so_targets
[i
].base
.buffer
);
153 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
157 static void virgl_attach_res_uniform_buffers(struct virgl_context
*vctx
,
158 enum pipe_shader_type shader_type
)
160 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
161 struct virgl_resource
*res
;
163 for (i
= 0; i
< PIPE_MAX_CONSTANT_BUFFERS
; i
++) {
164 res
= virgl_resource(vctx
->ubos
[shader_type
][i
]);
166 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
171 static void virgl_attach_res_shader_buffers(struct virgl_context
*vctx
,
172 enum pipe_shader_type shader_type
)
174 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
175 struct virgl_resource
*res
;
177 for (i
= 0; i
< PIPE_MAX_SHADER_BUFFERS
; i
++) {
178 res
= virgl_resource(vctx
->ssbos
[shader_type
][i
]);
180 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
185 static void virgl_attach_res_shader_images(struct virgl_context
*vctx
,
186 enum pipe_shader_type shader_type
)
188 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
189 struct virgl_resource
*res
;
191 for (i
= 0; i
< PIPE_MAX_SHADER_IMAGES
; i
++) {
192 res
= virgl_resource(vctx
->images
[shader_type
][i
]);
194 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
200 * after flushing, the hw context still has a bunch of
201 * resources bound, so we need to rebind those here.
203 static void virgl_reemit_res(struct virgl_context
*vctx
)
205 enum pipe_shader_type shader_type
;
207 /* reattach any flushed resources */
208 /* framebuffer, sampler views, vertex/index/uniform/stream buffers */
209 virgl_attach_res_framebuffer(vctx
);
211 for (shader_type
= 0; shader_type
< PIPE_SHADER_TYPES
; shader_type
++) {
212 virgl_attach_res_sampler_views(vctx
, shader_type
);
213 virgl_attach_res_uniform_buffers(vctx
, shader_type
);
214 virgl_attach_res_shader_buffers(vctx
, shader_type
);
215 virgl_attach_res_shader_images(vctx
, shader_type
);
217 virgl_attach_res_vertex_buffers(vctx
);
218 virgl_attach_res_so_targets(vctx
);
221 static struct pipe_surface
*virgl_create_surface(struct pipe_context
*ctx
,
222 struct pipe_resource
*resource
,
223 const struct pipe_surface
*templ
)
225 struct virgl_context
*vctx
= virgl_context(ctx
);
226 struct virgl_surface
*surf
;
227 struct virgl_resource
*res
= virgl_resource(resource
);
230 surf
= CALLOC_STRUCT(virgl_surface
);
235 handle
= virgl_object_assign_handle();
236 pipe_reference_init(&surf
->base
.reference
, 1);
237 pipe_resource_reference(&surf
->base
.texture
, resource
);
238 surf
->base
.context
= ctx
;
239 surf
->base
.format
= templ
->format
;
240 if (resource
->target
!= PIPE_BUFFER
) {
241 surf
->base
.width
= u_minify(resource
->width0
, templ
->u
.tex
.level
);
242 surf
->base
.height
= u_minify(resource
->height0
, templ
->u
.tex
.level
);
243 surf
->base
.u
.tex
.level
= templ
->u
.tex
.level
;
244 surf
->base
.u
.tex
.first_layer
= templ
->u
.tex
.first_layer
;
245 surf
->base
.u
.tex
.last_layer
= templ
->u
.tex
.last_layer
;
247 surf
->base
.width
= templ
->u
.buf
.last_element
- templ
->u
.buf
.first_element
+ 1;
248 surf
->base
.height
= resource
->height0
;
249 surf
->base
.u
.buf
.first_element
= templ
->u
.buf
.first_element
;
250 surf
->base
.u
.buf
.last_element
= templ
->u
.buf
.last_element
;
252 virgl_encoder_create_surface(vctx
, handle
, res
, &surf
->base
);
253 surf
->handle
= handle
;
257 static void virgl_surface_destroy(struct pipe_context
*ctx
,
258 struct pipe_surface
*psurf
)
260 struct virgl_context
*vctx
= virgl_context(ctx
);
261 struct virgl_surface
*surf
= virgl_surface(psurf
);
263 pipe_resource_reference(&surf
->base
.texture
, NULL
);
264 virgl_encode_delete_object(vctx
, surf
->handle
, VIRGL_OBJECT_SURFACE
);
268 static void *virgl_create_blend_state(struct pipe_context
*ctx
,
269 const struct pipe_blend_state
*blend_state
)
271 struct virgl_context
*vctx
= virgl_context(ctx
);
273 handle
= virgl_object_assign_handle();
275 virgl_encode_blend_state(vctx
, handle
, blend_state
);
276 return (void *)(unsigned long)handle
;
280 static void virgl_bind_blend_state(struct pipe_context
*ctx
,
283 struct virgl_context
*vctx
= virgl_context(ctx
);
284 uint32_t handle
= (unsigned long)blend_state
;
285 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_BLEND
);
288 static void virgl_delete_blend_state(struct pipe_context
*ctx
,
291 struct virgl_context
*vctx
= virgl_context(ctx
);
292 uint32_t handle
= (unsigned long)blend_state
;
293 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_BLEND
);
296 static void *virgl_create_depth_stencil_alpha_state(struct pipe_context
*ctx
,
297 const struct pipe_depth_stencil_alpha_state
*blend_state
)
299 struct virgl_context
*vctx
= virgl_context(ctx
);
301 handle
= virgl_object_assign_handle();
303 virgl_encode_dsa_state(vctx
, handle
, blend_state
);
304 return (void *)(unsigned long)handle
;
307 static void virgl_bind_depth_stencil_alpha_state(struct pipe_context
*ctx
,
310 struct virgl_context
*vctx
= virgl_context(ctx
);
311 uint32_t handle
= (unsigned long)blend_state
;
312 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_DSA
);
315 static void virgl_delete_depth_stencil_alpha_state(struct pipe_context
*ctx
,
318 struct virgl_context
*vctx
= virgl_context(ctx
);
319 uint32_t handle
= (unsigned long)dsa_state
;
320 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_DSA
);
323 static void *virgl_create_rasterizer_state(struct pipe_context
*ctx
,
324 const struct pipe_rasterizer_state
*rs_state
)
326 struct virgl_context
*vctx
= virgl_context(ctx
);
328 handle
= virgl_object_assign_handle();
330 virgl_encode_rasterizer_state(vctx
, handle
, rs_state
);
331 return (void *)(unsigned long)handle
;
334 static void virgl_bind_rasterizer_state(struct pipe_context
*ctx
,
337 struct virgl_context
*vctx
= virgl_context(ctx
);
338 uint32_t handle
= (unsigned long)rs_state
;
340 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_RASTERIZER
);
343 static void virgl_delete_rasterizer_state(struct pipe_context
*ctx
,
346 struct virgl_context
*vctx
= virgl_context(ctx
);
347 uint32_t handle
= (unsigned long)rs_state
;
348 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_RASTERIZER
);
351 static void virgl_set_framebuffer_state(struct pipe_context
*ctx
,
352 const struct pipe_framebuffer_state
*state
)
354 struct virgl_context
*vctx
= virgl_context(ctx
);
356 vctx
->framebuffer
= *state
;
357 virgl_encoder_set_framebuffer_state(vctx
, state
);
358 virgl_attach_res_framebuffer(vctx
);
/* Forward viewport state straight to the encoder.
 * NOTE(review): the `unsigned start_slot` parameter line was elided in
 * the corrupted source; restored from its use in the call below. */
static void virgl_set_viewport_states(struct pipe_context *ctx,
                                      unsigned start_slot,
                                      unsigned num_viewports,
                                      const struct pipe_viewport_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_viewport_states(vctx, start_slot, num_viewports, state);
}
370 static void *virgl_create_vertex_elements_state(struct pipe_context
*ctx
,
371 unsigned num_elements
,
372 const struct pipe_vertex_element
*elements
)
374 struct virgl_context
*vctx
= virgl_context(ctx
);
375 uint32_t handle
= virgl_object_assign_handle();
376 virgl_encoder_create_vertex_elements(vctx
, handle
,
377 num_elements
, elements
);
378 return (void*)(unsigned long)handle
;
382 static void virgl_delete_vertex_elements_state(struct pipe_context
*ctx
,
385 struct virgl_context
*vctx
= virgl_context(ctx
);
386 uint32_t handle
= (unsigned long)ve
;
388 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_VERTEX_ELEMENTS
);
391 static void virgl_bind_vertex_elements_state(struct pipe_context
*ctx
,
394 struct virgl_context
*vctx
= virgl_context(ctx
);
395 uint32_t handle
= (unsigned long)ve
;
396 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_VERTEX_ELEMENTS
);
399 static void virgl_set_vertex_buffers(struct pipe_context
*ctx
,
401 unsigned num_buffers
,
402 const struct pipe_vertex_buffer
*buffers
)
404 struct virgl_context
*vctx
= virgl_context(ctx
);
406 util_set_vertex_buffers_count(vctx
->vertex_buffer
,
407 &vctx
->num_vertex_buffers
,
408 buffers
, start_slot
, num_buffers
);
410 vctx
->vertex_array_dirty
= TRUE
;
413 static void virgl_hw_set_vertex_buffers(struct pipe_context
*ctx
)
415 struct virgl_context
*vctx
= virgl_context(ctx
);
417 if (vctx
->vertex_array_dirty
) {
418 virgl_encoder_set_vertex_buffers(vctx
, vctx
->num_vertex_buffers
, vctx
->vertex_buffer
);
419 virgl_attach_res_vertex_buffers(vctx
);
/* Forward the stencil reference values to the encoder. */
static void virgl_set_stencil_ref(struct pipe_context *ctx,
                                  const struct pipe_stencil_ref *ref)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_stencil_ref(vctx, ref);
}
/* Forward the constant blend color to the encoder. */
static void virgl_set_blend_color(struct pipe_context *ctx,
                                  const struct pipe_blend_color *color)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_blend_color(vctx, color);
}
/* Encode the index-buffer binding and keep its resource referenced in
 * the command buffer. */
static void virgl_hw_set_index_buffer(struct pipe_context *ctx,
                                      struct virgl_indexbuf *ib)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_index_buffer(vctx, ib);
   virgl_attach_res_index_buffer(vctx, ib);
}
445 static void virgl_set_constant_buffer(struct pipe_context
*ctx
,
446 enum pipe_shader_type shader
, uint index
,
447 const struct pipe_constant_buffer
*buf
)
449 struct virgl_context
*vctx
= virgl_context(ctx
);
452 if (!buf
->user_buffer
){
453 struct virgl_resource
*res
= virgl_resource(buf
->buffer
);
454 virgl_encoder_set_uniform_buffer(vctx
, shader
, index
, buf
->buffer_offset
,
455 buf
->buffer_size
, res
);
456 pipe_resource_reference(&vctx
->ubos
[shader
][index
], buf
->buffer
);
459 pipe_resource_reference(&vctx
->ubos
[shader
][index
], NULL
);
460 virgl_encoder_write_constant_buffer(vctx
, shader
, index
, buf
->buffer_size
/ 4, buf
->user_buffer
);
462 virgl_encoder_write_constant_buffer(vctx
, shader
, index
, 0, NULL
);
463 pipe_resource_reference(&vctx
->ubos
[shader
][index
], NULL
);
467 void virgl_transfer_inline_write(struct pipe_context
*ctx
,
468 struct pipe_resource
*res
,
471 const struct pipe_box
*box
,
474 unsigned layer_stride
)
476 struct virgl_context
*vctx
= virgl_context(ctx
);
477 struct virgl_screen
*vs
= virgl_screen(ctx
->screen
);
478 struct virgl_resource
*grres
= virgl_resource(res
);
479 struct virgl_buffer
*vbuf
= virgl_buffer(res
);
481 grres
->clean
= FALSE
;
483 if (virgl_res_needs_flush_wait(vctx
, &vbuf
->base
, usage
)) {
484 ctx
->flush(ctx
, NULL
, 0);
486 vs
->vws
->resource_wait(vs
->vws
, vbuf
->base
.hw_res
);
489 virgl_encoder_inline_write(vctx
, grres
, level
, usage
,
490 box
, data
, stride
, layer_stride
);
493 static void *virgl_shader_encoder(struct pipe_context
*ctx
,
494 const struct pipe_shader_state
*shader
,
497 struct virgl_context
*vctx
= virgl_context(ctx
);
499 struct tgsi_token
*new_tokens
;
502 new_tokens
= virgl_tgsi_transform(vctx
, shader
->tokens
);
506 handle
= virgl_object_assign_handle();
507 /* encode VS state */
508 ret
= virgl_encode_shader_state(vctx
, handle
, type
,
509 &shader
->stream_output
,
516 return (void *)(unsigned long)handle
;
519 static void *virgl_create_vs_state(struct pipe_context
*ctx
,
520 const struct pipe_shader_state
*shader
)
522 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_VERTEX
);
525 static void *virgl_create_tcs_state(struct pipe_context
*ctx
,
526 const struct pipe_shader_state
*shader
)
528 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_TESS_CTRL
);
531 static void *virgl_create_tes_state(struct pipe_context
*ctx
,
532 const struct pipe_shader_state
*shader
)
534 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_TESS_EVAL
);
537 static void *virgl_create_gs_state(struct pipe_context
*ctx
,
538 const struct pipe_shader_state
*shader
)
540 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_GEOMETRY
);
543 static void *virgl_create_fs_state(struct pipe_context
*ctx
,
544 const struct pipe_shader_state
*shader
)
546 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_FRAGMENT
);
550 virgl_delete_fs_state(struct pipe_context
*ctx
,
553 uint32_t handle
= (unsigned long)fs
;
554 struct virgl_context
*vctx
= virgl_context(ctx
);
556 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
560 virgl_delete_gs_state(struct pipe_context
*ctx
,
563 uint32_t handle
= (unsigned long)gs
;
564 struct virgl_context
*vctx
= virgl_context(ctx
);
566 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
570 virgl_delete_vs_state(struct pipe_context
*ctx
,
573 uint32_t handle
= (unsigned long)vs
;
574 struct virgl_context
*vctx
= virgl_context(ctx
);
576 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
580 virgl_delete_tcs_state(struct pipe_context
*ctx
,
583 uint32_t handle
= (unsigned long)tcs
;
584 struct virgl_context
*vctx
= virgl_context(ctx
);
586 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
590 virgl_delete_tes_state(struct pipe_context
*ctx
,
593 uint32_t handle
= (unsigned long)tes
;
594 struct virgl_context
*vctx
= virgl_context(ctx
);
596 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
599 static void virgl_bind_vs_state(struct pipe_context
*ctx
,
602 uint32_t handle
= (unsigned long)vss
;
603 struct virgl_context
*vctx
= virgl_context(ctx
);
605 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_VERTEX
);
608 static void virgl_bind_tcs_state(struct pipe_context
*ctx
,
611 uint32_t handle
= (unsigned long)vss
;
612 struct virgl_context
*vctx
= virgl_context(ctx
);
614 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_TESS_CTRL
);
617 static void virgl_bind_tes_state(struct pipe_context
*ctx
,
620 uint32_t handle
= (unsigned long)vss
;
621 struct virgl_context
*vctx
= virgl_context(ctx
);
623 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_TESS_EVAL
);
626 static void virgl_bind_gs_state(struct pipe_context
*ctx
,
629 uint32_t handle
= (unsigned long)vss
;
630 struct virgl_context
*vctx
= virgl_context(ctx
);
632 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_GEOMETRY
);
636 static void virgl_bind_fs_state(struct pipe_context
*ctx
,
639 uint32_t handle
= (unsigned long)vss
;
640 struct virgl_context
*vctx
= virgl_context(ctx
);
642 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_FRAGMENT
);
/* Forward a clear of the given buffers (color/depth/stencil bitmask) to
 * the encoder.  NOTE(review): `unsigned buffers` parameter restored
 * (elided line), per its use in the call. */
static void virgl_clear(struct pipe_context *ctx,
                        unsigned buffers,
                        const union pipe_color_union *color,
                        double depth, unsigned stencil)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_clear(vctx, buffers, color, depth, stencil);
}
655 static void virgl_draw_vbo(struct pipe_context
*ctx
,
656 const struct pipe_draw_info
*dinfo
)
658 struct virgl_context
*vctx
= virgl_context(ctx
);
659 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
660 struct virgl_indexbuf ib
= {};
661 struct pipe_draw_info info
= *dinfo
;
663 if (!dinfo
->count_from_stream_output
&& !dinfo
->indirect
&&
664 !dinfo
->primitive_restart
&&
665 !u_trim_pipe_prim(dinfo
->mode
, (unsigned*)&dinfo
->count
))
668 if (!(rs
->caps
.caps
.v1
.prim_mask
& (1 << dinfo
->mode
))) {
669 util_primconvert_draw_vbo(vctx
->primconvert
, dinfo
);
672 if (info
.index_size
) {
673 pipe_resource_reference(&ib
.buffer
, info
.has_user_indices
? NULL
: info
.index
.resource
);
674 ib
.user_buffer
= info
.has_user_indices
? info
.index
.user
: NULL
;
675 ib
.index_size
= dinfo
->index_size
;
676 ib
.offset
= info
.start
* ib
.index_size
;
678 if (ib
.user_buffer
) {
679 u_upload_data(vctx
->uploader
, 0, info
.count
* ib
.index_size
, 256,
680 ib
.user_buffer
, &ib
.offset
, &ib
.buffer
);
681 ib
.user_buffer
= NULL
;
685 u_upload_unmap(vctx
->uploader
);
688 virgl_hw_set_vertex_buffers(ctx
);
690 virgl_hw_set_index_buffer(ctx
, &ib
);
692 virgl_encoder_draw_vbo(vctx
, &info
);
694 pipe_resource_reference(&ib
.buffer
, NULL
);
698 static void virgl_flush_eq(struct virgl_context
*ctx
, void *closure
)
700 struct virgl_screen
*rs
= virgl_screen(ctx
->base
.screen
);
702 /* send the buffer to the remote side for decoding */
703 ctx
->num_transfers
= ctx
->num_draws
= 0;
704 rs
->vws
->submit_cmd(rs
->vws
, ctx
->cbuf
);
706 virgl_encoder_set_sub_ctx(ctx
, ctx
->hw_sub_ctx_id
);
708 /* add back current framebuffer resources to reference list? */
709 virgl_reemit_res(ctx
);
712 static void virgl_flush_from_st(struct pipe_context
*ctx
,
713 struct pipe_fence_handle
**fence
,
714 enum pipe_flush_flags flags
)
716 struct virgl_context
*vctx
= virgl_context(ctx
);
717 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
718 struct virgl_buffer
*buf
, *tmp
;
721 *fence
= rs
->vws
->cs_create_fence(rs
->vws
);
723 LIST_FOR_EACH_ENTRY_SAFE(buf
, tmp
, &vctx
->to_flush_bufs
, flush_list
) {
724 struct pipe_resource
*res
= &buf
->base
.u
.b
;
725 virgl_buffer_flush(vctx
, buf
);
726 list_del(&buf
->flush_list
);
727 buf
->on_list
= FALSE
;
728 pipe_resource_reference(&res
, NULL
);
731 virgl_flush_eq(vctx
, vctx
);
734 static struct pipe_sampler_view
*virgl_create_sampler_view(struct pipe_context
*ctx
,
735 struct pipe_resource
*texture
,
736 const struct pipe_sampler_view
*state
)
738 struct virgl_context
*vctx
= virgl_context(ctx
);
739 struct virgl_sampler_view
*grview
;
741 struct virgl_resource
*res
;
746 grview
= CALLOC_STRUCT(virgl_sampler_view
);
750 res
= virgl_resource(texture
);
751 handle
= virgl_object_assign_handle();
752 virgl_encode_sampler_view(vctx
, handle
, res
, state
);
754 grview
->base
= *state
;
755 grview
->base
.reference
.count
= 1;
757 grview
->base
.texture
= NULL
;
758 grview
->base
.context
= ctx
;
759 pipe_resource_reference(&grview
->base
.texture
, texture
);
760 grview
->handle
= handle
;
761 return &grview
->base
;
764 static void virgl_set_sampler_views(struct pipe_context
*ctx
,
765 enum pipe_shader_type shader_type
,
768 struct pipe_sampler_view
**views
)
770 struct virgl_context
*vctx
= virgl_context(ctx
);
772 uint32_t disable_mask
= ~((1ull << num_views
) - 1);
773 struct virgl_textures_info
*tinfo
= &vctx
->samplers
[shader_type
];
774 uint32_t new_mask
= 0;
775 uint32_t remaining_mask
;
777 remaining_mask
= tinfo
->enabled_mask
& disable_mask
;
779 while (remaining_mask
) {
780 i
= u_bit_scan(&remaining_mask
);
781 assert(tinfo
->views
[i
]);
783 pipe_sampler_view_reference((struct pipe_sampler_view
**)&tinfo
->views
[i
], NULL
);
786 for (i
= 0; i
< num_views
; i
++) {
787 struct virgl_sampler_view
*grview
= virgl_sampler_view(views
[i
]);
789 if (views
[i
] == (struct pipe_sampler_view
*)tinfo
->views
[i
])
794 pipe_sampler_view_reference((struct pipe_sampler_view
**)&tinfo
->views
[i
], views
[i
]);
796 pipe_sampler_view_reference((struct pipe_sampler_view
**)&tinfo
->views
[i
], NULL
);
797 disable_mask
|= 1 << i
;
801 tinfo
->enabled_mask
&= ~disable_mask
;
802 tinfo
->enabled_mask
|= new_mask
;
803 virgl_encode_set_sampler_views(vctx
, shader_type
, start_slot
, num_views
, tinfo
->views
);
804 virgl_attach_res_sampler_views(vctx
, shader_type
);
807 static void virgl_destroy_sampler_view(struct pipe_context
*ctx
,
808 struct pipe_sampler_view
*view
)
810 struct virgl_context
*vctx
= virgl_context(ctx
);
811 struct virgl_sampler_view
*grview
= virgl_sampler_view(view
);
813 virgl_encode_delete_object(vctx
, grview
->handle
, VIRGL_OBJECT_SAMPLER_VIEW
);
814 pipe_resource_reference(&view
->texture
, NULL
);
818 static void *virgl_create_sampler_state(struct pipe_context
*ctx
,
819 const struct pipe_sampler_state
*state
)
821 struct virgl_context
*vctx
= virgl_context(ctx
);
824 handle
= virgl_object_assign_handle();
826 virgl_encode_sampler_state(vctx
, handle
, state
);
827 return (void *)(unsigned long)handle
;
830 static void virgl_delete_sampler_state(struct pipe_context
*ctx
,
833 struct virgl_context
*vctx
= virgl_context(ctx
);
834 uint32_t handle
= (unsigned long)ss
;
836 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SAMPLER_STATE
);
839 static void virgl_bind_sampler_states(struct pipe_context
*ctx
,
840 enum pipe_shader_type shader
,
842 unsigned num_samplers
,
845 struct virgl_context
*vctx
= virgl_context(ctx
);
846 uint32_t handles
[32];
848 for (i
= 0; i
< num_samplers
; i
++) {
849 handles
[i
] = (unsigned long)(samplers
[i
]);
851 virgl_encode_bind_sampler_states(vctx
, shader
, start_slot
, num_samplers
, handles
);
/* Forward the polygon stipple pattern to the encoder. */
static void virgl_set_polygon_stipple(struct pipe_context *ctx,
                                      const struct pipe_poly_stipple *ps)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_polygon_stipple(vctx, ps);
}
/* Forward scissor rectangles to the encoder.
 * NOTE(review): `unsigned start_slot` parameter restored (elided line). */
static void virgl_set_scissor_states(struct pipe_context *ctx,
                                     unsigned start_slot,
                                     unsigned num_scissor,
                                     const struct pipe_scissor_state *ss)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_scissor_state(vctx, start_slot, num_scissor, ss);
}
/* Forward the MSAA sample mask to the encoder. */
static void virgl_set_sample_mask(struct pipe_context *ctx,
                                  unsigned sample_mask)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_sample_mask(vctx, sample_mask);
}
877 static void virgl_set_min_samples(struct pipe_context
*ctx
,
878 unsigned min_samples
)
880 struct virgl_context
*vctx
= virgl_context(ctx
);
881 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
883 if (!(rs
->caps
.caps
.v2
.capability_bits
& VIRGL_CAP_SET_MIN_SAMPLES
))
885 virgl_encoder_set_min_samples(vctx
, min_samples
);
/* Forward user clip planes to the encoder. */
static void virgl_set_clip_state(struct pipe_context *ctx,
                                 const struct pipe_clip_state *clip)
{
   struct virgl_context *vctx = virgl_context(ctx);
   virgl_encoder_set_clip_state(vctx, clip);
}
895 static void virgl_set_tess_state(struct pipe_context
*ctx
,
896 const float default_outer_level
[4],
897 const float default_inner_level
[2])
899 struct virgl_context
*vctx
= virgl_context(ctx
);
900 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
902 if (!rs
->caps
.caps
.v1
.bset
.has_tessellation_shaders
)
904 virgl_encode_set_tess_state(vctx
, default_outer_level
, default_inner_level
);
907 static void virgl_resource_copy_region(struct pipe_context
*ctx
,
908 struct pipe_resource
*dst
,
910 unsigned dstx
, unsigned dsty
, unsigned dstz
,
911 struct pipe_resource
*src
,
913 const struct pipe_box
*src_box
)
915 struct virgl_context
*vctx
= virgl_context(ctx
);
916 struct virgl_resource
*dres
= virgl_resource(dst
);
917 struct virgl_resource
*sres
= virgl_resource(src
);
920 virgl_encode_resource_copy_region(vctx
, dres
,
921 dst_level
, dstx
, dsty
, dstz
,
/* pipe_context::flush_resource hook — intentionally a no-op for virgl;
 * resource flushing is handled via the flush list and transfer paths. */
static void
virgl_flush_resource(struct pipe_context *pipe,
                     struct pipe_resource *resource)
{
}
932 static void virgl_blit(struct pipe_context
*ctx
,
933 const struct pipe_blit_info
*blit
)
935 struct virgl_context
*vctx
= virgl_context(ctx
);
936 struct virgl_resource
*dres
= virgl_resource(blit
->dst
.resource
);
937 struct virgl_resource
*sres
= virgl_resource(blit
->src
.resource
);
940 virgl_encode_blit(vctx
, dres
, sres
,
944 static void virgl_set_shader_buffers(struct pipe_context
*ctx
,
945 enum pipe_shader_type shader
,
946 unsigned start_slot
, unsigned count
,
947 const struct pipe_shader_buffer
*buffers
)
949 struct virgl_context
*vctx
= virgl_context(ctx
);
950 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
952 for (unsigned i
= 0; i
< count
; i
++) {
953 unsigned idx
= start_slot
+ i
;
956 if (buffers
[i
].buffer
) {
957 pipe_resource_reference(&vctx
->ssbos
[shader
][idx
], buffers
[i
].buffer
);
961 pipe_resource_reference(&vctx
->ssbos
[shader
][idx
], NULL
);
964 uint32_t max_shader_buffer
= shader
== PIPE_SHADER_FRAGMENT
?
965 rs
->caps
.caps
.v2
.max_shader_buffer_frag_compute
:
966 rs
->caps
.caps
.v2
.max_shader_buffer_other_stages
;
967 if (!max_shader_buffer
)
969 virgl_encode_set_shader_buffers(vctx
, shader
, start_slot
, count
, buffers
);
972 static void virgl_set_shader_images(struct pipe_context
*ctx
,
973 enum pipe_shader_type shader
,
974 unsigned start_slot
, unsigned count
,
975 const struct pipe_image_view
*images
)
977 struct virgl_context
*vctx
= virgl_context(ctx
);
978 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
980 for (unsigned i
= 0; i
< count
; i
++) {
981 unsigned idx
= start_slot
+ i
;
984 if (images
[i
].resource
) {
985 pipe_resource_reference(&vctx
->images
[shader
][idx
], images
[i
].resource
);
989 pipe_resource_reference(&vctx
->images
[shader
][idx
], NULL
);
992 uint32_t max_shader_images
= shader
== PIPE_SHADER_FRAGMENT
?
993 rs
->caps
.caps
.v2
.max_shader_image_frag_compute
:
994 rs
->caps
.caps
.v2
.max_shader_image_other_stages
;
995 if (!max_shader_images
)
997 virgl_encode_set_shader_images(vctx
, shader
, start_slot
, count
, images
);
1001 virgl_context_destroy( struct pipe_context
*ctx
)
1003 struct virgl_context
*vctx
= virgl_context(ctx
);
1004 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
1006 vctx
->framebuffer
.zsbuf
= NULL
;
1007 vctx
->framebuffer
.nr_cbufs
= 0;
1008 virgl_encoder_destroy_sub_ctx(vctx
, vctx
->hw_sub_ctx_id
);
1009 virgl_flush_eq(vctx
, vctx
);
1011 rs
->vws
->cmd_buf_destroy(vctx
->cbuf
);
1013 u_upload_destroy(vctx
->uploader
);
1014 util_primconvert_destroy(vctx
->primconvert
);
1016 slab_destroy_child(&vctx
->texture_transfer_pool
);
1020 static void virgl_get_sample_position(struct pipe_context
*ctx
,
1021 unsigned sample_count
,
1025 struct virgl_context
*vctx
= virgl_context(ctx
);
1026 struct virgl_screen
*vs
= virgl_screen(vctx
->base
.screen
);
1028 if (sample_count
> vs
->caps
.caps
.v1
.max_samples
) {
1029 debug_printf("VIRGL: requested %d MSAA samples, but only %d supported\n",
1030 sample_count
, vs
->caps
.caps
.v1
.max_samples
);
1034 /* The following is basically copied from dri/i965gen6_get_sample_position
1035 * The only addition is that we hold the msaa positions for all sample
1036 * counts in a flat array. */
1038 if (sample_count
== 1) {
1039 out_value
[0] = out_value
[1] = 0.5f
;
1041 } else if (sample_count
== 2) {
1042 bits
= vs
->caps
.caps
.v2
.msaa_sample_positions
[0] >> (8 * index
);
1043 } else if (sample_count
<= 4) {
1044 bits
= vs
->caps
.caps
.v2
.msaa_sample_positions
[1] >> (8 * index
);
1045 } else if (sample_count
<= 8) {
1046 bits
= vs
->caps
.caps
.v2
.msaa_sample_positions
[2 + (index
>> 2)] >> (8 * (index
& 3));
1047 } else if (sample_count
<= 16) {
1048 bits
= vs
->caps
.caps
.v2
.msaa_sample_positions
[4 + (index
>> 2)] >> (8 * (index
& 3));
1050 out_value
[0] = ((bits
>> 4) & 0xf) / 16.0f
;
1051 out_value
[1] = (bits
& 0xf) / 16.0f
;
1052 debug_printf("VIRGL: sample postion [%2d/%2d] = (%f, %f)\n",
1053 index
, sample_count
, out_value
[0], out_value
[1]);
1056 struct pipe_context
*virgl_context_create(struct pipe_screen
*pscreen
,
1060 struct virgl_context
*vctx
;
1061 struct virgl_screen
*rs
= virgl_screen(pscreen
);
1062 vctx
= CALLOC_STRUCT(virgl_context
);
1064 vctx
->cbuf
= rs
->vws
->cmd_buf_create(rs
->vws
);
1070 vctx
->base
.destroy
= virgl_context_destroy
;
1071 vctx
->base
.create_surface
= virgl_create_surface
;
1072 vctx
->base
.surface_destroy
= virgl_surface_destroy
;
1073 vctx
->base
.set_framebuffer_state
= virgl_set_framebuffer_state
;
1074 vctx
->base
.create_blend_state
= virgl_create_blend_state
;
1075 vctx
->base
.bind_blend_state
= virgl_bind_blend_state
;
1076 vctx
->base
.delete_blend_state
= virgl_delete_blend_state
;
1077 vctx
->base
.create_depth_stencil_alpha_state
= virgl_create_depth_stencil_alpha_state
;
1078 vctx
->base
.bind_depth_stencil_alpha_state
= virgl_bind_depth_stencil_alpha_state
;
1079 vctx
->base
.delete_depth_stencil_alpha_state
= virgl_delete_depth_stencil_alpha_state
;
1080 vctx
->base
.create_rasterizer_state
= virgl_create_rasterizer_state
;
1081 vctx
->base
.bind_rasterizer_state
= virgl_bind_rasterizer_state
;
1082 vctx
->base
.delete_rasterizer_state
= virgl_delete_rasterizer_state
;
1084 vctx
->base
.set_viewport_states
= virgl_set_viewport_states
;
1085 vctx
->base
.create_vertex_elements_state
= virgl_create_vertex_elements_state
;
1086 vctx
->base
.bind_vertex_elements_state
= virgl_bind_vertex_elements_state
;
1087 vctx
->base
.delete_vertex_elements_state
= virgl_delete_vertex_elements_state
;
1088 vctx
->base
.set_vertex_buffers
= virgl_set_vertex_buffers
;
1089 vctx
->base
.set_constant_buffer
= virgl_set_constant_buffer
;
1091 vctx
->base
.set_tess_state
= virgl_set_tess_state
;
1092 vctx
->base
.create_vs_state
= virgl_create_vs_state
;
1093 vctx
->base
.create_tcs_state
= virgl_create_tcs_state
;
1094 vctx
->base
.create_tes_state
= virgl_create_tes_state
;
1095 vctx
->base
.create_gs_state
= virgl_create_gs_state
;
1096 vctx
->base
.create_fs_state
= virgl_create_fs_state
;
1098 vctx
->base
.bind_vs_state
= virgl_bind_vs_state
;
1099 vctx
->base
.bind_tcs_state
= virgl_bind_tcs_state
;
1100 vctx
->base
.bind_tes_state
= virgl_bind_tes_state
;
1101 vctx
->base
.bind_gs_state
= virgl_bind_gs_state
;
1102 vctx
->base
.bind_fs_state
= virgl_bind_fs_state
;
1104 vctx
->base
.delete_vs_state
= virgl_delete_vs_state
;
1105 vctx
->base
.delete_tcs_state
= virgl_delete_tcs_state
;
1106 vctx
->base
.delete_tes_state
= virgl_delete_tes_state
;
1107 vctx
->base
.delete_gs_state
= virgl_delete_gs_state
;
1108 vctx
->base
.delete_fs_state
= virgl_delete_fs_state
;
1110 vctx
->base
.clear
= virgl_clear
;
1111 vctx
->base
.draw_vbo
= virgl_draw_vbo
;
1112 vctx
->base
.flush
= virgl_flush_from_st
;
1113 vctx
->base
.screen
= pscreen
;
1114 vctx
->base
.create_sampler_view
= virgl_create_sampler_view
;
1115 vctx
->base
.sampler_view_destroy
= virgl_destroy_sampler_view
;
1116 vctx
->base
.set_sampler_views
= virgl_set_sampler_views
;
1118 vctx
->base
.create_sampler_state
= virgl_create_sampler_state
;
1119 vctx
->base
.delete_sampler_state
= virgl_delete_sampler_state
;
1120 vctx
->base
.bind_sampler_states
= virgl_bind_sampler_states
;
1122 vctx
->base
.set_polygon_stipple
= virgl_set_polygon_stipple
;
1123 vctx
->base
.set_scissor_states
= virgl_set_scissor_states
;
1124 vctx
->base
.set_sample_mask
= virgl_set_sample_mask
;
1125 vctx
->base
.set_min_samples
= virgl_set_min_samples
;
1126 vctx
->base
.set_stencil_ref
= virgl_set_stencil_ref
;
1127 vctx
->base
.set_clip_state
= virgl_set_clip_state
;
1129 vctx
->base
.set_blend_color
= virgl_set_blend_color
;
1131 vctx
->base
.get_sample_position
= virgl_get_sample_position
;
1133 vctx
->base
.resource_copy_region
= virgl_resource_copy_region
;
1134 vctx
->base
.flush_resource
= virgl_flush_resource
;
1135 vctx
->base
.blit
= virgl_blit
;
1137 vctx
->base
.set_shader_buffers
= virgl_set_shader_buffers
;
1138 vctx
->base
.set_shader_images
= virgl_set_shader_images
;
1139 virgl_init_context_resource_functions(&vctx
->base
);
1140 virgl_init_query_functions(vctx
);
1141 virgl_init_so_functions(vctx
);
1143 list_inithead(&vctx
->to_flush_bufs
);
1144 slab_create_child(&vctx
->texture_transfer_pool
, &rs
->texture_transfer_pool
);
1146 vctx
->primconvert
= util_primconvert_create(&vctx
->base
, rs
->caps
.caps
.v1
.prim_mask
);
1147 vctx
->uploader
= u_upload_create(&vctx
->base
, 1024 * 1024,
1148 PIPE_BIND_INDEX_BUFFER
, PIPE_USAGE_STREAM
, 0);
1149 if (!vctx
->uploader
)
1151 vctx
->base
.stream_uploader
= vctx
->uploader
;
1152 vctx
->base
.const_uploader
= vctx
->uploader
;
1154 vctx
->hw_sub_ctx_id
= rs
->sub_ctx_id
++;
1155 virgl_encoder_create_sub_ctx(vctx
, vctx
->hw_sub_ctx_id
);
1157 virgl_encoder_set_sub_ctx(vctx
, vctx
->hw_sub_ctx_id
);