/*
 * Copyright 2014, 2015 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
24 #include "pipe/p_shader_tokens.h"
26 #include "pipe/p_context.h"
27 #include "pipe/p_defines.h"
28 #include "pipe/p_screen.h"
29 #include "pipe/p_state.h"
30 #include "util/u_inlines.h"
31 #include "util/u_memory.h"
32 #include "util/u_format.h"
33 #include "util/u_transfer.h"
34 #include "util/u_helpers.h"
35 #include "util/u_slab.h"
36 #include "util/u_upload_mgr.h"
37 #include "util/u_blitter.h"
38 #include "tgsi/tgsi_text.h"
40 #include "pipebuffer/pb_buffer.h"
41 #include "state_tracker/graw.h"
42 #include "state_tracker/drm_driver.h"
44 #include "virgl_encode.h"
46 #include "virgl_context.h"
48 #include "virgl_resource.h"
49 #include "virgl_screen.h"
50 #include "state_tracker/sw_winsys.h"
51 struct pipe_screen encscreen
;
/* Monotonic counter backing virgl object handle allocation. */
static uint32_t next_handle;

/*
 * Hand out a fresh, non-zero handle for a virgl object.
 * NOTE(review): not thread-safe — assumes single-threaded context use.
 */
uint32_t virgl_object_assign_handle(void)
{
   return ++next_handle;
}
59 static void virgl_buffer_flush(struct virgl_context
*vctx
,
60 struct virgl_buffer
*vbuf
)
62 struct virgl_screen
*rs
= virgl_screen(vctx
->base
.screen
);
65 assert(vbuf
->on_list
);
72 box
.x
= vbuf
->valid_buffer_range
.start
;
73 box
.width
= MIN2(vbuf
->valid_buffer_range
.end
- vbuf
->valid_buffer_range
.start
, vbuf
->base
.u
.b
.width0
);
75 vctx
->num_transfers
++;
76 rs
->vws
->transfer_put(rs
->vws
, vbuf
->base
.hw_res
,
77 &box
, 0, 0, box
.x
, 0);
79 util_range_set_empty(&vbuf
->valid_buffer_range
);
82 static void virgl_attach_res_framebuffer(struct virgl_context
*vctx
)
84 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
85 struct pipe_surface
*surf
;
86 struct virgl_resource
*res
;
89 surf
= vctx
->framebuffer
.zsbuf
;
91 res
= virgl_resource(surf
->texture
);
93 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
95 for (i
= 0; i
< vctx
->framebuffer
.nr_cbufs
; i
++) {
96 surf
= vctx
->framebuffer
.cbufs
[i
];
98 res
= virgl_resource(surf
->texture
);
100 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
105 static void virgl_attach_res_sampler_views(struct virgl_context
*vctx
,
106 unsigned shader_type
)
108 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
109 struct virgl_textures_info
*tinfo
= &vctx
->samplers
[shader_type
];
110 struct virgl_resource
*res
;
111 uint32_t remaining_mask
= tinfo
->enabled_mask
;
113 while (remaining_mask
) {
114 i
= u_bit_scan(&remaining_mask
);
115 assert(tinfo
->views
[i
]);
117 res
= virgl_resource(tinfo
->views
[i
]->base
.texture
);
119 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
123 static void virgl_attach_res_vertex_buffers(struct virgl_context
*vctx
)
125 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
126 struct virgl_resource
*res
;
129 for (i
= 0; i
< vctx
->num_vertex_buffers
; i
++) {
130 res
= virgl_resource(vctx
->vertex_buffer
[i
].buffer
);
132 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
136 static void virgl_attach_res_index_buffer(struct virgl_context
*vctx
)
138 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
139 struct virgl_resource
*res
;
141 res
= virgl_resource(vctx
->index_buffer
.buffer
);
143 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
146 static void virgl_attach_res_so_targets(struct virgl_context
*vctx
)
148 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
149 struct virgl_resource
*res
;
152 for (i
= 0; i
< vctx
->num_so_targets
; i
++) {
153 res
= virgl_resource(vctx
->so_targets
[i
].base
.buffer
);
155 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
159 static void virgl_attach_res_uniform_buffers(struct virgl_context
*vctx
,
160 unsigned shader_type
)
162 struct virgl_winsys
*vws
= virgl_screen(vctx
->base
.screen
)->vws
;
163 struct virgl_resource
*res
;
165 for (i
= 0; i
< PIPE_MAX_CONSTANT_BUFFERS
; i
++) {
166 res
= virgl_resource(vctx
->ubos
[shader_type
][i
]);
168 vws
->emit_res(vws
, vctx
->cbuf
, res
->hw_res
, FALSE
);
174 * after flushing, the hw context still has a bunch of
175 * resources bound, so we need to rebind those here.
177 static void virgl_reemit_res(struct virgl_context
*vctx
)
179 unsigned shader_type
;
181 /* reattach any flushed resources */
182 /* framebuffer, sampler views, vertex/index/uniform/stream buffers */
183 virgl_attach_res_framebuffer(vctx
);
185 for (shader_type
= 0; shader_type
< PIPE_SHADER_TYPES
; shader_type
++) {
186 virgl_attach_res_sampler_views(vctx
, shader_type
);
187 virgl_attach_res_uniform_buffers(vctx
, shader_type
);
189 virgl_attach_res_index_buffer(vctx
);
190 virgl_attach_res_vertex_buffers(vctx
);
191 virgl_attach_res_so_targets(vctx
);
194 static struct pipe_surface
*virgl_create_surface(struct pipe_context
*ctx
,
195 struct pipe_resource
*resource
,
196 const struct pipe_surface
*templ
)
198 struct virgl_context
*vctx
= virgl_context(ctx
);
199 struct virgl_surface
*surf
;
200 struct virgl_resource
*res
= virgl_resource(resource
);
203 surf
= CALLOC_STRUCT(virgl_surface
);
208 handle
= virgl_object_assign_handle();
209 pipe_reference_init(&surf
->base
.reference
, 1);
210 pipe_resource_reference(&surf
->base
.texture
, resource
);
211 surf
->base
.context
= ctx
;
212 surf
->base
.format
= templ
->format
;
213 if (resource
->target
!= PIPE_BUFFER
) {
214 surf
->base
.width
= u_minify(resource
->width0
, templ
->u
.tex
.level
);
215 surf
->base
.height
= u_minify(resource
->height0
, templ
->u
.tex
.level
);
216 surf
->base
.u
.tex
.level
= templ
->u
.tex
.level
;
217 surf
->base
.u
.tex
.first_layer
= templ
->u
.tex
.first_layer
;
218 surf
->base
.u
.tex
.last_layer
= templ
->u
.tex
.last_layer
;
220 surf
->base
.width
= templ
->u
.buf
.last_element
- templ
->u
.buf
.first_element
+ 1;
221 surf
->base
.height
= resource
->height0
;
222 surf
->base
.u
.buf
.first_element
= templ
->u
.buf
.first_element
;
223 surf
->base
.u
.buf
.last_element
= templ
->u
.buf
.last_element
;
225 virgl_encoder_create_surface(vctx
, handle
, res
, &surf
->base
);
226 surf
->handle
= handle
;
230 static void virgl_surface_destroy(struct pipe_context
*ctx
,
231 struct pipe_surface
*psurf
)
233 struct virgl_context
*vctx
= virgl_context(ctx
);
234 struct virgl_surface
*surf
= virgl_surface(psurf
);
236 pipe_resource_reference(&surf
->base
.texture
, NULL
);
237 virgl_encode_delete_object(vctx
, surf
->handle
, VIRGL_OBJECT_SURFACE
);
241 static void *virgl_create_blend_state(struct pipe_context
*ctx
,
242 const struct pipe_blend_state
*blend_state
)
244 struct virgl_context
*vctx
= virgl_context(ctx
);
246 handle
= virgl_object_assign_handle();
248 virgl_encode_blend_state(vctx
, handle
, blend_state
);
249 return (void *)(unsigned long)handle
;
253 static void virgl_bind_blend_state(struct pipe_context
*ctx
,
256 struct virgl_context
*vctx
= virgl_context(ctx
);
257 uint32_t handle
= (unsigned long)blend_state
;
258 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_BLEND
);
261 static void virgl_delete_blend_state(struct pipe_context
*ctx
,
264 struct virgl_context
*vctx
= virgl_context(ctx
);
265 uint32_t handle
= (unsigned long)blend_state
;
266 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_BLEND
);
269 static void *virgl_create_depth_stencil_alpha_state(struct pipe_context
*ctx
,
270 const struct pipe_depth_stencil_alpha_state
*blend_state
)
272 struct virgl_context
*vctx
= virgl_context(ctx
);
274 handle
= virgl_object_assign_handle();
276 virgl_encode_dsa_state(vctx
, handle
, blend_state
);
277 return (void *)(unsigned long)handle
;
280 static void virgl_bind_depth_stencil_alpha_state(struct pipe_context
*ctx
,
283 struct virgl_context
*vctx
= virgl_context(ctx
);
284 uint32_t handle
= (unsigned long)blend_state
;
285 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_DSA
);
288 static void virgl_delete_depth_stencil_alpha_state(struct pipe_context
*ctx
,
291 struct virgl_context
*vctx
= virgl_context(ctx
);
292 uint32_t handle
= (unsigned long)dsa_state
;
293 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_DSA
);
296 static void *virgl_create_rasterizer_state(struct pipe_context
*ctx
,
297 const struct pipe_rasterizer_state
*rs_state
)
299 struct virgl_context
*vctx
= virgl_context(ctx
);
301 handle
= virgl_object_assign_handle();
303 virgl_encode_rasterizer_state(vctx
, handle
, rs_state
);
304 return (void *)(unsigned long)handle
;
307 static void virgl_bind_rasterizer_state(struct pipe_context
*ctx
,
310 struct virgl_context
*vctx
= virgl_context(ctx
);
311 uint32_t handle
= (unsigned long)rs_state
;
313 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_RASTERIZER
);
316 static void virgl_delete_rasterizer_state(struct pipe_context
*ctx
,
319 struct virgl_context
*vctx
= virgl_context(ctx
);
320 uint32_t handle
= (unsigned long)rs_state
;
321 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_RASTERIZER
);
324 static void virgl_set_framebuffer_state(struct pipe_context
*ctx
,
325 const struct pipe_framebuffer_state
*state
)
327 struct virgl_context
*vctx
= virgl_context(ctx
);
329 vctx
->framebuffer
= *state
;
330 virgl_encoder_set_framebuffer_state(vctx
, state
);
331 virgl_attach_res_framebuffer(vctx
);
/* Forward viewport state directly to the encoder. */
static void virgl_set_viewport_states(struct pipe_context *ctx,
                                      unsigned start_slot,
                                      unsigned num_viewports,
                                      const struct pipe_viewport_state *state)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_viewport_states(vctx, start_slot, num_viewports, state);
}
343 static void *virgl_create_vertex_elements_state(struct pipe_context
*ctx
,
344 unsigned num_elements
,
345 const struct pipe_vertex_element
*elements
)
347 struct virgl_context
*vctx
= virgl_context(ctx
);
348 uint32_t handle
= virgl_object_assign_handle();
349 virgl_encoder_create_vertex_elements(vctx
, handle
,
350 num_elements
, elements
);
351 return (void*)(unsigned long)handle
;
355 static void virgl_delete_vertex_elements_state(struct pipe_context
*ctx
,
358 struct virgl_context
*vctx
= virgl_context(ctx
);
359 uint32_t handle
= (unsigned long)ve
;
361 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_VERTEX_ELEMENTS
);
364 static void virgl_bind_vertex_elements_state(struct pipe_context
*ctx
,
367 struct virgl_context
*vctx
= virgl_context(ctx
);
368 uint32_t handle
= (unsigned long)ve
;
369 virgl_encode_bind_object(vctx
, handle
, VIRGL_OBJECT_VERTEX_ELEMENTS
);
372 static void virgl_set_vertex_buffers(struct pipe_context
*ctx
,
374 unsigned num_buffers
,
375 const struct pipe_vertex_buffer
*buffers
)
377 struct virgl_context
*vctx
= virgl_context(ctx
);
379 util_set_vertex_buffers_count(vctx
->vertex_buffer
,
380 &vctx
->num_vertex_buffers
,
381 buffers
, start_slot
, num_buffers
);
383 vctx
->vertex_array_dirty
= TRUE
;
386 static void virgl_hw_set_vertex_buffers(struct pipe_context
*ctx
)
388 struct virgl_context
*vctx
= virgl_context(ctx
);
390 if (vctx
->vertex_array_dirty
) {
391 virgl_encoder_set_vertex_buffers(vctx
, vctx
->num_vertex_buffers
, vctx
->vertex_buffer
);
392 virgl_attach_res_vertex_buffers(vctx
);
/* Forward the stencil reference values directly to the encoder. */
static void virgl_set_stencil_ref(struct pipe_context *ctx,
                                  const struct pipe_stencil_ref *ref)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_stencil_ref(vctx, ref);
}
/* Forward the blend color directly to the encoder. */
static void virgl_set_blend_color(struct pipe_context *ctx,
                                  const struct pipe_blend_color *color)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_blend_color(vctx, color);
}
410 static void virgl_set_index_buffer(struct pipe_context
*ctx
,
411 const struct pipe_index_buffer
*ib
)
413 struct virgl_context
*vctx
= virgl_context(ctx
);
416 pipe_resource_reference(&vctx
->index_buffer
.buffer
, ib
->buffer
);
417 memcpy(&vctx
->index_buffer
, ib
, sizeof(*ib
));
419 pipe_resource_reference(&vctx
->index_buffer
.buffer
, NULL
);
/* Encode the index buffer and reattach its resource for this cmd buffer. */
static void virgl_hw_set_index_buffer(struct pipe_context *ctx,
                                      struct pipe_index_buffer *ib)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_index_buffer(vctx, ib);
   virgl_attach_res_index_buffer(vctx);
}
431 static void virgl_set_constant_buffer(struct pipe_context
*ctx
,
432 uint shader
, uint index
,
433 struct pipe_constant_buffer
*buf
)
435 struct virgl_context
*vctx
= virgl_context(ctx
);
438 if (!buf
->user_buffer
){
439 struct virgl_resource
*res
= virgl_resource(buf
->buffer
);
440 virgl_encoder_set_uniform_buffer(vctx
, shader
, index
, buf
->buffer_offset
,
441 buf
->buffer_size
, res
);
442 pipe_resource_reference(&vctx
->ubos
[shader
][index
], buf
->buffer
);
445 pipe_resource_reference(&vctx
->ubos
[shader
][index
], NULL
);
446 virgl_encoder_write_constant_buffer(vctx
, shader
, index
, buf
->buffer_size
/ 4, buf
->user_buffer
);
448 virgl_encoder_write_constant_buffer(vctx
, shader
, index
, 0, NULL
);
449 pipe_resource_reference(&vctx
->ubos
[shader
][index
], NULL
);
453 void virgl_transfer_inline_write(struct pipe_context
*ctx
,
454 struct pipe_resource
*res
,
457 const struct pipe_box
*box
,
460 unsigned layer_stride
)
462 struct virgl_context
*vctx
= virgl_context(ctx
);
463 struct virgl_screen
*vs
= virgl_screen(ctx
->screen
);
464 struct virgl_resource
*grres
= virgl_resource(res
);
465 struct virgl_buffer
*vbuf
= virgl_buffer(res
);
467 grres
->clean
= FALSE
;
469 if (virgl_res_needs_flush_wait(vctx
, &vbuf
->base
, usage
)) {
470 ctx
->flush(ctx
, NULL
, 0);
472 vs
->vws
->resource_wait(vs
->vws
, vbuf
->base
.hw_res
);
475 virgl_encoder_inline_write(vctx
, grres
, level
, usage
,
476 box
, data
, stride
, layer_stride
);
479 static void *virgl_shader_encoder(struct pipe_context
*ctx
,
480 const struct pipe_shader_state
*shader
,
483 struct virgl_context
*vctx
= virgl_context(ctx
);
485 struct tgsi_token
*new_tokens
;
488 new_tokens
= virgl_tgsi_transform(shader
->tokens
);
492 handle
= virgl_object_assign_handle();
493 /* encode VS state */
494 ret
= virgl_encode_shader_state(vctx
, handle
, type
,
495 &shader
->stream_output
,
502 return (void *)(unsigned long)handle
;
505 static void *virgl_create_vs_state(struct pipe_context
*ctx
,
506 const struct pipe_shader_state
*shader
)
508 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_VERTEX
);
511 static void *virgl_create_gs_state(struct pipe_context
*ctx
,
512 const struct pipe_shader_state
*shader
)
514 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_GEOMETRY
);
517 static void *virgl_create_fs_state(struct pipe_context
*ctx
,
518 const struct pipe_shader_state
*shader
)
520 return virgl_shader_encoder(ctx
, shader
, PIPE_SHADER_FRAGMENT
);
524 virgl_delete_fs_state(struct pipe_context
*ctx
,
527 uint32_t handle
= (unsigned long)fs
;
528 struct virgl_context
*vctx
= virgl_context(ctx
);
530 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
534 virgl_delete_gs_state(struct pipe_context
*ctx
,
537 uint32_t handle
= (unsigned long)gs
;
538 struct virgl_context
*vctx
= virgl_context(ctx
);
540 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
544 virgl_delete_vs_state(struct pipe_context
*ctx
,
547 uint32_t handle
= (unsigned long)vs
;
548 struct virgl_context
*vctx
= virgl_context(ctx
);
550 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SHADER
);
553 static void virgl_bind_vs_state(struct pipe_context
*ctx
,
556 uint32_t handle
= (unsigned long)vss
;
557 struct virgl_context
*vctx
= virgl_context(ctx
);
559 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_VERTEX
);
562 static void virgl_bind_gs_state(struct pipe_context
*ctx
,
565 uint32_t handle
= (unsigned long)vss
;
566 struct virgl_context
*vctx
= virgl_context(ctx
);
568 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_GEOMETRY
);
572 static void virgl_bind_fs_state(struct pipe_context
*ctx
,
575 uint32_t handle
= (unsigned long)vss
;
576 struct virgl_context
*vctx
= virgl_context(ctx
);
578 virgl_encode_bind_shader(vctx
, handle
, PIPE_SHADER_FRAGMENT
);
/* Forward a framebuffer clear directly to the encoder. */
static void virgl_clear(struct pipe_context *ctx,
                        unsigned buffers,
                        const union pipe_color_union *color,
                        double depth, unsigned stencil)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encode_clear(vctx, buffers, color, depth, stencil);
}
591 static void virgl_draw_vbo(struct pipe_context
*ctx
,
592 const struct pipe_draw_info
*dinfo
)
594 struct virgl_context
*vctx
= virgl_context(ctx
);
595 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
596 struct pipe_index_buffer ib
= {};
597 struct pipe_draw_info info
= *dinfo
;
599 if (!(rs
->caps
.caps
.v1
.prim_mask
& (1 << dinfo
->mode
))) {
600 util_primconvert_save_index_buffer(vctx
->primconvert
, &vctx
->index_buffer
);
601 util_primconvert_draw_vbo(vctx
->primconvert
, dinfo
);
605 pipe_resource_reference(&ib
.buffer
, vctx
->index_buffer
.buffer
);
606 ib
.user_buffer
= vctx
->index_buffer
.user_buffer
;
607 ib
.index_size
= vctx
->index_buffer
.index_size
;
608 ib
.offset
= vctx
->index_buffer
.offset
+ info
.start
* ib
.index_size
;
610 if (ib
.user_buffer
) {
611 u_upload_data(vctx
->uploader
, 0, info
.count
* ib
.index_size
,
612 ib
.user_buffer
, &ib
.offset
, &ib
.buffer
);
613 ib
.user_buffer
= NULL
;
617 u_upload_unmap(vctx
->uploader
);
620 virgl_hw_set_vertex_buffers(ctx
);
622 virgl_hw_set_index_buffer(ctx
, &ib
);
624 virgl_encoder_draw_vbo(vctx
, &info
);
626 pipe_resource_reference(&ib
.buffer
, NULL
);
630 static void virgl_flush_eq(struct virgl_context
*ctx
, void *closure
)
632 struct virgl_screen
*rs
= virgl_screen(ctx
->base
.screen
);
634 /* send the buffer to the remote side for decoding */
635 ctx
->num_transfers
= ctx
->num_draws
= 0;
636 rs
->vws
->submit_cmd(rs
->vws
, ctx
->cbuf
);
638 virgl_encoder_set_sub_ctx(ctx
, ctx
->hw_sub_ctx_id
);
640 /* add back current framebuffer resources to reference list? */
641 virgl_reemit_res(ctx
);
644 static void virgl_flush_from_st(struct pipe_context
*ctx
,
645 struct pipe_fence_handle
**fence
,
646 enum pipe_flush_flags flags
)
648 struct virgl_context
*vctx
= virgl_context(ctx
);
649 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
650 struct virgl_buffer
*buf
, *tmp
;
653 *fence
= rs
->vws
->cs_create_fence(rs
->vws
);
655 LIST_FOR_EACH_ENTRY_SAFE(buf
, tmp
, &vctx
->to_flush_bufs
, flush_list
) {
656 struct pipe_resource
*res
= &buf
->base
.u
.b
;
657 virgl_buffer_flush(vctx
, buf
);
658 list_del(&buf
->flush_list
);
659 buf
->on_list
= FALSE
;
660 pipe_resource_reference(&res
, NULL
);
663 virgl_flush_eq(vctx
, vctx
);
666 static struct pipe_sampler_view
*virgl_create_sampler_view(struct pipe_context
*ctx
,
667 struct pipe_resource
*texture
,
668 const struct pipe_sampler_view
*state
)
670 struct virgl_context
*vctx
= virgl_context(ctx
);
671 struct virgl_sampler_view
*grview
= CALLOC_STRUCT(virgl_sampler_view
);
673 struct virgl_resource
*res
;
678 res
= virgl_resource(texture
);
679 handle
= virgl_object_assign_handle();
680 virgl_encode_sampler_view(vctx
, handle
, res
, state
);
682 grview
->base
= *state
;
683 grview
->base
.reference
.count
= 1;
685 grview
->base
.texture
= NULL
;
686 grview
->base
.context
= ctx
;
687 pipe_resource_reference(&grview
->base
.texture
, texture
);
688 grview
->handle
= handle
;
689 return &grview
->base
;
692 static void virgl_set_sampler_views(struct pipe_context
*ctx
,
693 unsigned shader_type
,
696 struct pipe_sampler_view
**views
)
698 struct virgl_context
*vctx
= virgl_context(ctx
);
700 uint32_t disable_mask
= ~((1ull << num_views
) - 1);
701 struct virgl_textures_info
*tinfo
= &vctx
->samplers
[shader_type
];
702 uint32_t new_mask
= 0;
703 uint32_t remaining_mask
;
705 remaining_mask
= tinfo
->enabled_mask
& disable_mask
;
707 while (remaining_mask
) {
708 i
= u_bit_scan(&remaining_mask
);
709 assert(tinfo
->views
[i
]);
711 pipe_sampler_view_reference((struct pipe_sampler_view
**)&tinfo
->views
[i
], NULL
);
714 for (i
= 0; i
< num_views
; i
++) {
715 struct virgl_sampler_view
*grview
= virgl_sampler_view(views
[i
]);
717 if (views
[i
] == (struct pipe_sampler_view
*)tinfo
->views
[i
])
722 pipe_sampler_view_reference((struct pipe_sampler_view
**)&tinfo
->views
[i
], views
[i
]);
724 pipe_sampler_view_reference((struct pipe_sampler_view
**)&tinfo
->views
[i
], NULL
);
725 disable_mask
|= 1 << i
;
729 tinfo
->enabled_mask
&= ~disable_mask
;
730 tinfo
->enabled_mask
|= new_mask
;
731 virgl_encode_set_sampler_views(vctx
, shader_type
, start_slot
, num_views
, tinfo
->views
);
732 virgl_attach_res_sampler_views(vctx
, shader_type
);
735 static void virgl_destroy_sampler_view(struct pipe_context
*ctx
,
736 struct pipe_sampler_view
*view
)
738 struct virgl_context
*vctx
= virgl_context(ctx
);
739 struct virgl_sampler_view
*grview
= virgl_sampler_view(view
);
741 virgl_encode_delete_object(vctx
, grview
->handle
, VIRGL_OBJECT_SAMPLER_VIEW
);
742 pipe_resource_reference(&view
->texture
, NULL
);
746 static void *virgl_create_sampler_state(struct pipe_context
*ctx
,
747 const struct pipe_sampler_state
*state
)
749 struct virgl_context
*vctx
= virgl_context(ctx
);
752 handle
= virgl_object_assign_handle();
754 virgl_encode_sampler_state(vctx
, handle
, state
);
755 return (void *)(unsigned long)handle
;
758 static void virgl_delete_sampler_state(struct pipe_context
*ctx
,
761 struct virgl_context
*vctx
= virgl_context(ctx
);
762 uint32_t handle
= (unsigned long)ss
;
764 virgl_encode_delete_object(vctx
, handle
, VIRGL_OBJECT_SAMPLER_STATE
);
767 static void virgl_bind_sampler_states(struct pipe_context
*ctx
,
768 unsigned shader
, unsigned start_slot
,
769 unsigned num_samplers
,
772 struct virgl_context
*vctx
= virgl_context(ctx
);
773 uint32_t handles
[32];
775 for (i
= 0; i
< num_samplers
; i
++) {
776 handles
[i
] = (unsigned long)(samplers
[i
]);
778 virgl_encode_bind_sampler_states(vctx
, shader
, start_slot
, num_samplers
, handles
);
/* Forward the polygon stipple pattern directly to the encoder. */
static void virgl_set_polygon_stipple(struct pipe_context *ctx,
                                      const struct pipe_poly_stipple *ps)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_polygon_stipple(vctx, ps);
}
/* Forward scissor state directly to the encoder. */
static void virgl_set_scissor_states(struct pipe_context *ctx,
                                     unsigned start_slot,
                                     unsigned num_scissor,
                                     const struct pipe_scissor_state *ss)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_scissor_state(vctx, start_slot, num_scissor, ss);
}
/* Forward the sample mask directly to the encoder. */
static void virgl_set_sample_mask(struct pipe_context *ctx,
                                  unsigned sample_mask)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_sample_mask(vctx, sample_mask);
}
/* Forward user clip-plane state directly to the encoder. */
static void virgl_set_clip_state(struct pipe_context *ctx,
                                 const struct pipe_clip_state *clip)
{
   struct virgl_context *vctx = virgl_context(ctx);

   virgl_encoder_set_clip_state(vctx, clip);
}
811 static void virgl_resource_copy_region(struct pipe_context
*ctx
,
812 struct pipe_resource
*dst
,
814 unsigned dstx
, unsigned dsty
, unsigned dstz
,
815 struct pipe_resource
*src
,
817 const struct pipe_box
*src_box
)
819 struct virgl_context
*vctx
= virgl_context(ctx
);
820 struct virgl_resource
*dres
= virgl_resource(dst
);
821 struct virgl_resource
*sres
= virgl_resource(src
);
824 virgl_encode_resource_copy_region(vctx
, dres
,
825 dst_level
, dstx
, dsty
, dstz
,
/* No-op: virgl has nothing to do for a per-resource flush. */
static void
virgl_flush_resource(struct pipe_context *pipe,
                     struct pipe_resource *resource)
{
}
836 static void virgl_blit(struct pipe_context
*ctx
,
837 const struct pipe_blit_info
*blit
)
839 struct virgl_context
*vctx
= virgl_context(ctx
);
840 struct virgl_resource
*dres
= virgl_resource(blit
->dst
.resource
);
841 struct virgl_resource
*sres
= virgl_resource(blit
->src
.resource
);
844 virgl_encode_blit(vctx
, dres
, sres
,
849 virgl_context_destroy( struct pipe_context
*ctx
)
851 struct virgl_context
*vctx
= virgl_context(ctx
);
852 struct virgl_screen
*rs
= virgl_screen(ctx
->screen
);
854 vctx
->framebuffer
.zsbuf
= NULL
;
855 vctx
->framebuffer
.nr_cbufs
= 0;
856 virgl_encoder_destroy_sub_ctx(vctx
, vctx
->hw_sub_ctx_id
);
857 virgl_flush_eq(vctx
, vctx
);
859 rs
->vws
->cmd_buf_destroy(vctx
->cbuf
);
861 u_upload_destroy(vctx
->uploader
);
862 util_primconvert_destroy(vctx
->primconvert
);
864 util_slab_destroy(&vctx
->texture_transfer_pool
);
868 struct pipe_context
*virgl_context_create(struct pipe_screen
*pscreen
,
872 struct virgl_context
*vctx
;
873 struct virgl_screen
*rs
= virgl_screen(pscreen
);
874 vctx
= CALLOC_STRUCT(virgl_context
);
876 vctx
->cbuf
= rs
->vws
->cmd_buf_create(rs
->vws
);
882 vctx
->base
.destroy
= virgl_context_destroy
;
883 vctx
->base
.create_surface
= virgl_create_surface
;
884 vctx
->base
.surface_destroy
= virgl_surface_destroy
;
885 vctx
->base
.set_framebuffer_state
= virgl_set_framebuffer_state
;
886 vctx
->base
.create_blend_state
= virgl_create_blend_state
;
887 vctx
->base
.bind_blend_state
= virgl_bind_blend_state
;
888 vctx
->base
.delete_blend_state
= virgl_delete_blend_state
;
889 vctx
->base
.create_depth_stencil_alpha_state
= virgl_create_depth_stencil_alpha_state
;
890 vctx
->base
.bind_depth_stencil_alpha_state
= virgl_bind_depth_stencil_alpha_state
;
891 vctx
->base
.delete_depth_stencil_alpha_state
= virgl_delete_depth_stencil_alpha_state
;
892 vctx
->base
.create_rasterizer_state
= virgl_create_rasterizer_state
;
893 vctx
->base
.bind_rasterizer_state
= virgl_bind_rasterizer_state
;
894 vctx
->base
.delete_rasterizer_state
= virgl_delete_rasterizer_state
;
896 vctx
->base
.set_viewport_states
= virgl_set_viewport_states
;
897 vctx
->base
.create_vertex_elements_state
= virgl_create_vertex_elements_state
;
898 vctx
->base
.bind_vertex_elements_state
= virgl_bind_vertex_elements_state
;
899 vctx
->base
.delete_vertex_elements_state
= virgl_delete_vertex_elements_state
;
900 vctx
->base
.set_vertex_buffers
= virgl_set_vertex_buffers
;
901 vctx
->base
.set_index_buffer
= virgl_set_index_buffer
;
902 vctx
->base
.set_constant_buffer
= virgl_set_constant_buffer
;
903 vctx
->base
.transfer_inline_write
= virgl_transfer_inline_write
;
905 vctx
->base
.create_vs_state
= virgl_create_vs_state
;
906 vctx
->base
.create_gs_state
= virgl_create_gs_state
;
907 vctx
->base
.create_fs_state
= virgl_create_fs_state
;
909 vctx
->base
.bind_vs_state
= virgl_bind_vs_state
;
910 vctx
->base
.bind_gs_state
= virgl_bind_gs_state
;
911 vctx
->base
.bind_fs_state
= virgl_bind_fs_state
;
913 vctx
->base
.delete_vs_state
= virgl_delete_vs_state
;
914 vctx
->base
.delete_gs_state
= virgl_delete_gs_state
;
915 vctx
->base
.delete_fs_state
= virgl_delete_fs_state
;
917 vctx
->base
.clear
= virgl_clear
;
918 vctx
->base
.draw_vbo
= virgl_draw_vbo
;
919 vctx
->base
.flush
= virgl_flush_from_st
;
920 vctx
->base
.screen
= pscreen
;
921 vctx
->base
.create_sampler_view
= virgl_create_sampler_view
;
922 vctx
->base
.sampler_view_destroy
= virgl_destroy_sampler_view
;
923 vctx
->base
.set_sampler_views
= virgl_set_sampler_views
;
925 vctx
->base
.create_sampler_state
= virgl_create_sampler_state
;
926 vctx
->base
.delete_sampler_state
= virgl_delete_sampler_state
;
927 vctx
->base
.bind_sampler_states
= virgl_bind_sampler_states
;
929 vctx
->base
.set_polygon_stipple
= virgl_set_polygon_stipple
;
930 vctx
->base
.set_scissor_states
= virgl_set_scissor_states
;
931 vctx
->base
.set_sample_mask
= virgl_set_sample_mask
;
932 vctx
->base
.set_stencil_ref
= virgl_set_stencil_ref
;
933 vctx
->base
.set_clip_state
= virgl_set_clip_state
;
935 vctx
->base
.set_blend_color
= virgl_set_blend_color
;
937 vctx
->base
.resource_copy_region
= virgl_resource_copy_region
;
938 vctx
->base
.flush_resource
= virgl_flush_resource
;
939 vctx
->base
.blit
= virgl_blit
;
941 virgl_init_context_resource_functions(&vctx
->base
);
942 virgl_init_query_functions(vctx
);
943 virgl_init_so_functions(vctx
);
945 list_inithead(&vctx
->to_flush_bufs
);
946 util_slab_create(&vctx
->texture_transfer_pool
, sizeof(struct virgl_transfer
),
947 16, UTIL_SLAB_SINGLETHREADED
);
949 vctx
->primconvert
= util_primconvert_create(&vctx
->base
, rs
->caps
.caps
.v1
.prim_mask
);
950 vctx
->uploader
= u_upload_create(&vctx
->base
, 1024 * 1024, 256,
951 PIPE_BIND_INDEX_BUFFER
);
955 vctx
->hw_sub_ctx_id
= rs
->sub_ctx_id
++;
956 virgl_encoder_create_sub_ctx(vctx
, vctx
->hw_sub_ctx_id
);
958 virgl_encoder_set_sub_ctx(vctx
, vctx
->hw_sub_ctx_id
);