/*
 * Copyright 2018 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
24 #include "zink_context.h"
26 #include "zink_cmdbuf.h"
27 #include "zink_compiler.h"
28 #include "zink_framebuffer.h"
29 #include "zink_pipeline.h"
30 #include "zink_program.h"
31 #include "zink_render_pass.h"
32 #include "zink_resource.h"
33 #include "zink_screen.h"
34 #include "zink_state.h"
35 #include "zink_surface.h"
37 #include "indices/u_primconvert.h"
38 #include "util/u_blitter.h"
39 #include "util/u_debug.h"
40 #include "util/u_format.h"
41 #include "util/u_framebuffer.h"
42 #include "util/u_helpers.h"
43 #include "util/u_inlines.h"
47 #include "util/u_memory.h"
48 #include "util/u_prim.h"
49 #include "util/u_upload_mgr.h"
52 zink_context_destroy(struct pipe_context
*pctx
)
54 struct zink_context
*ctx
= zink_context(pctx
);
55 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
56 vkFreeCommandBuffers(screen
->dev
, ctx
->cmdpool
, 1, &ctx
->cmdbuf
.cmdbuf
);
57 vkDestroyCommandPool(screen
->dev
, ctx
->cmdpool
, NULL
);
59 util_primconvert_destroy(ctx
->primconvert
);
60 u_upload_destroy(pctx
->stream_uploader
);
61 slab_destroy_child(&ctx
->transfer_pool
);
62 util_blitter_destroy(ctx
->blitter
);
67 filter(enum pipe_tex_filter filter
)
70 case PIPE_TEX_FILTER_NEAREST
: return VK_FILTER_NEAREST
;
71 case PIPE_TEX_FILTER_LINEAR
: return VK_FILTER_LINEAR
;
73 unreachable("unexpected filter");
76 static VkSamplerMipmapMode
77 sampler_mipmap_mode(enum pipe_tex_mipfilter filter
)
80 case PIPE_TEX_MIPFILTER_NEAREST
: return VK_SAMPLER_MIPMAP_MODE_NEAREST
;
81 case PIPE_TEX_MIPFILTER_LINEAR
: return VK_SAMPLER_MIPMAP_MODE_LINEAR
;
82 case PIPE_TEX_MIPFILTER_NONE
:
83 unreachable("PIPE_TEX_MIPFILTER_NONE should be dealt with earlier");
85 unreachable("unexpected filter");
88 static VkSamplerAddressMode
89 sampler_address_mode(enum pipe_tex_wrap filter
)
92 case PIPE_TEX_WRAP_REPEAT
: return VK_SAMPLER_ADDRESS_MODE_REPEAT
;
93 case PIPE_TEX_WRAP_CLAMP
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
94 case PIPE_TEX_WRAP_CLAMP_TO_EDGE
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
;
95 case PIPE_TEX_WRAP_CLAMP_TO_BORDER
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
;
96 case PIPE_TEX_WRAP_MIRROR_REPEAT
: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT
;
97 case PIPE_TEX_WRAP_MIRROR_CLAMP
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
98 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
;
99 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
101 unreachable("unexpected wrap");
105 zink_create_sampler_state(struct pipe_context
*pctx
,
106 const struct pipe_sampler_state
*state
)
108 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
110 VkSamplerCreateInfo sci
= {};
111 sci
.sType
= VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
;
112 sci
.magFilter
= filter(state
->mag_img_filter
);
113 sci
.minFilter
= filter(state
->min_img_filter
);
115 if (state
->min_mip_filter
!= PIPE_TEX_MIPFILTER_NONE
) {
116 sci
.mipmapMode
= sampler_mipmap_mode(state
->min_mip_filter
);
117 sci
.minLod
= state
->min_lod
;
118 sci
.maxLod
= state
->max_lod
;
120 sci
.mipmapMode
= VK_SAMPLER_MIPMAP_MODE_NEAREST
;
125 sci
.addressModeU
= sampler_address_mode(state
->wrap_s
);
126 sci
.addressModeV
= sampler_address_mode(state
->wrap_t
);
127 sci
.addressModeW
= sampler_address_mode(state
->wrap_r
);
128 sci
.mipLodBias
= state
->lod_bias
;
129 sci
.compareOp
= VK_COMPARE_OP_NEVER
; // TODO
130 sci
.borderColor
= VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK
; // TODO
132 if (state
->max_anisotropy
> 1) {
133 sci
.maxAnisotropy
= state
->max_anisotropy
;
134 sci
.anisotropyEnable
= VK_TRUE
;
138 VkResult err
= vkCreateSampler(screen
->dev
, &sci
, NULL
, &sampler
);
139 if (err
!= VK_SUCCESS
)
146 zink_bind_sampler_states(struct pipe_context
*pctx
,
147 enum pipe_shader_type shader
,
149 unsigned num_samplers
,
152 struct zink_context
*ctx
= zink_context(pctx
);
153 for (unsigned i
= 0; i
< num_samplers
; ++i
)
154 ctx
->samplers
[shader
][start_slot
+ i
] = (VkSampler
)samplers
[i
];
158 zink_delete_sampler_state(struct pipe_context
*pctx
,
161 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
162 vkDestroySampler(screen
->dev
, sampler_state
, NULL
);
166 static VkImageViewType
167 image_view_type(enum pipe_texture_target target
)
170 case PIPE_TEXTURE_1D
: return VK_IMAGE_VIEW_TYPE_1D
;
171 case PIPE_TEXTURE_1D_ARRAY
: return VK_IMAGE_VIEW_TYPE_1D_ARRAY
;
172 case PIPE_TEXTURE_2D
: return VK_IMAGE_VIEW_TYPE_2D
;
173 case PIPE_TEXTURE_2D_ARRAY
: return VK_IMAGE_VIEW_TYPE_2D_ARRAY
;
174 case PIPE_TEXTURE_CUBE
: return VK_IMAGE_VIEW_TYPE_CUBE
;
175 case PIPE_TEXTURE_CUBE_ARRAY
: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
;
176 case PIPE_TEXTURE_3D
: return VK_IMAGE_VIEW_TYPE_3D
;
177 case PIPE_TEXTURE_RECT
: return VK_IMAGE_VIEW_TYPE_2D
; /* not sure */
179 unreachable("unexpected target");
183 static VkComponentSwizzle
184 component_mapping(enum pipe_swizzle swizzle
)
187 case PIPE_SWIZZLE_X
: return VK_COMPONENT_SWIZZLE_R
;
188 case PIPE_SWIZZLE_Y
: return VK_COMPONENT_SWIZZLE_G
;
189 case PIPE_SWIZZLE_Z
: return VK_COMPONENT_SWIZZLE_B
;
190 case PIPE_SWIZZLE_W
: return VK_COMPONENT_SWIZZLE_A
;
191 case PIPE_SWIZZLE_0
: return VK_COMPONENT_SWIZZLE_ZERO
;
192 case PIPE_SWIZZLE_1
: return VK_COMPONENT_SWIZZLE_ONE
;
193 case PIPE_SWIZZLE_NONE
: return VK_COMPONENT_SWIZZLE_IDENTITY
; // ???
195 unreachable("unexpected swizzle");
199 static struct pipe_sampler_view
*
200 zink_create_sampler_view(struct pipe_context
*pctx
, struct pipe_resource
*pres
,
201 const struct pipe_sampler_view
*state
)
203 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
204 struct zink_resource
*res
= zink_resource(pres
);
205 struct zink_sampler_view
*sampler_view
= CALLOC_STRUCT(zink_sampler_view
);
207 sampler_view
->base
= *state
;
208 sampler_view
->base
.texture
= NULL
;
209 pipe_resource_reference(&sampler_view
->base
.texture
, pres
);
210 sampler_view
->base
.reference
.count
= 1;
211 sampler_view
->base
.context
= pctx
;
213 VkImageViewCreateInfo ivci
= {};
214 ivci
.sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
;
215 ivci
.image
= res
->image
;
216 ivci
.viewType
= image_view_type(state
->target
);
217 ivci
.format
= zink_get_format(state
->format
);
218 ivci
.components
.r
= component_mapping(state
->swizzle_r
);
219 ivci
.components
.g
= component_mapping(state
->swizzle_g
);
220 ivci
.components
.b
= component_mapping(state
->swizzle_b
);
221 ivci
.components
.a
= component_mapping(state
->swizzle_a
);
222 ivci
.subresourceRange
.aspectMask
= zink_aspect_from_format(state
->format
);
223 ivci
.subresourceRange
.baseMipLevel
= state
->u
.tex
.first_level
;
224 ivci
.subresourceRange
.baseArrayLayer
= state
->u
.tex
.first_layer
;
225 ivci
.subresourceRange
.levelCount
= state
->u
.tex
.last_level
- state
->u
.tex
.first_level
+ 1;
226 ivci
.subresourceRange
.layerCount
= state
->u
.tex
.last_layer
- state
->u
.tex
.first_layer
+ 1;
228 VkResult err
= vkCreateImageView(screen
->dev
, &ivci
, NULL
, &sampler_view
->image_view
);
229 if (err
!= VK_SUCCESS
) {
234 return &sampler_view
->base
;
238 zink_destroy_sampler_view(struct pipe_context
*pctx
,
239 struct pipe_sampler_view
*view
)
245 zink_create_vs_state(struct pipe_context
*pctx
,
246 const struct pipe_shader_state
*shader
)
248 struct nir_shader
*nir
;
249 if (shader
->type
!= PIPE_SHADER_IR_NIR
)
250 nir
= zink_tgsi_to_nir(pctx
->screen
, shader
->tokens
);
252 nir
= (struct nir_shader
*)shader
->ir
.nir
;
254 return zink_compile_nir(zink_screen(pctx
->screen
), nir
);
258 zink_bind_vs_state(struct pipe_context
*pctx
,
261 struct zink_context
*ctx
= zink_context(pctx
);
262 ctx
->gfx_stages
[PIPE_SHADER_VERTEX
] = cso
;
266 zink_delete_vs_state(struct pipe_context
*pctx
,
269 zink_shader_free(zink_screen(pctx
->screen
), cso
);
273 zink_create_fs_state(struct pipe_context
*pctx
,
274 const struct pipe_shader_state
*shader
)
276 struct nir_shader
*nir
;
277 if (shader
->type
!= PIPE_SHADER_IR_NIR
)
278 nir
= zink_tgsi_to_nir(pctx
->screen
, shader
->tokens
);
280 nir
= (struct nir_shader
*)shader
->ir
.nir
;
282 return zink_compile_nir(zink_screen(pctx
->screen
), nir
);
286 zink_bind_fs_state(struct pipe_context
*pctx
,
289 struct zink_context
*ctx
= zink_context(pctx
);
290 ctx
->gfx_stages
[PIPE_SHADER_FRAGMENT
] = cso
;
294 zink_delete_fs_state(struct pipe_context
*pctx
,
297 zink_shader_free(zink_screen(pctx
->screen
), cso
);
/* pipe_context::set_polygon_stipple — not implemented yet. */
static void
zink_set_polygon_stipple(struct pipe_context *pctx,
                         const struct pipe_poly_stipple *ps)
{
}
307 zink_set_vertex_buffers(struct pipe_context
*pctx
,
309 unsigned num_buffers
,
310 const struct pipe_vertex_buffer
*buffers
)
312 struct zink_context
*ctx
= zink_context(pctx
);
315 for (int i
= 0; i
< num_buffers
; ++i
) {
316 const struct pipe_vertex_buffer
*vb
= buffers
+ i
;
317 ctx
->gfx_pipeline_state
.bindings
[start_slot
+ i
].stride
= vb
->stride
;
321 util_set_vertex_buffers_mask(ctx
->buffers
, &ctx
->buffers_enabled_mask
,
322 buffers
, start_slot
, num_buffers
);
326 zink_set_viewport_states(struct pipe_context
*pctx
,
328 unsigned num_viewports
,
329 const struct pipe_viewport_state
*state
)
331 struct zink_context
*ctx
= zink_context(pctx
);
333 for (unsigned i
= 0; i
< num_viewports
; ++i
) {
334 VkViewport viewport
= {
335 state
[i
].translate
[0] - state
[i
].scale
[0],
336 state
[i
].translate
[1] - state
[i
].scale
[1],
337 state
[i
].scale
[0] * 2,
338 state
[i
].scale
[1] * 2,
339 state
[i
].translate
[2] - state
[i
].scale
[2],
340 state
[i
].translate
[2] + state
[i
].scale
[2]
342 ctx
->viewports
[start_slot
+ i
] = viewport
;
344 ctx
->num_viewports
= start_slot
+ num_viewports
;
348 zink_set_scissor_states(struct pipe_context
*pctx
,
349 unsigned start_slot
, unsigned num_scissors
,
350 const struct pipe_scissor_state
*states
)
352 struct zink_context
*ctx
= zink_context(pctx
);
354 for (unsigned i
= 0; i
< num_scissors
; i
++) {
357 scissor
.offset
.x
= states
[i
].minx
;
358 scissor
.offset
.y
= states
[i
].miny
;
359 scissor
.extent
.width
= states
[i
].maxx
- states
[i
].minx
;
360 scissor
.extent
.height
= states
[i
].maxy
- states
[i
].miny
;
361 ctx
->scissors
[start_slot
+ i
] = scissor
;
363 ctx
->num_scissors
= start_slot
+ num_scissors
;
367 zink_set_constant_buffer(struct pipe_context
*pctx
,
368 enum pipe_shader_type shader
, uint index
,
369 const struct pipe_constant_buffer
*cb
)
371 struct zink_context
*ctx
= zink_context(pctx
);
374 struct pipe_resource
*buffer
= cb
->buffer
;
375 unsigned offset
= cb
->buffer_offset
;
377 u_upload_data(ctx
->base
.const_uploader
, 0, cb
->buffer_size
, 64,
378 cb
->user_buffer
, &offset
, &buffer
);
380 pipe_resource_reference(&ctx
->ubos
[shader
][index
].buffer
, buffer
);
381 ctx
->ubos
[shader
][index
].buffer_offset
= offset
;
382 ctx
->ubos
[shader
][index
].buffer_size
= cb
->buffer_size
;
383 ctx
->ubos
[shader
][index
].user_buffer
= NULL
;
386 pipe_resource_reference(&buffer
, NULL
);
388 pipe_resource_reference(&ctx
->ubos
[shader
][index
].buffer
, NULL
);
389 ctx
->ubos
[shader
][index
].buffer_offset
= 0;
390 ctx
->ubos
[shader
][index
].buffer_size
= 0;
391 ctx
->ubos
[shader
][index
].user_buffer
= NULL
;
396 zink_set_sampler_views(struct pipe_context
*pctx
,
397 enum pipe_shader_type shader_type
,
400 struct pipe_sampler_view
**views
)
402 struct zink_context
*ctx
= zink_context(pctx
);
404 for (unsigned i
= 0; i
< num_views
; ++i
) {
405 pipe_sampler_view_reference(
406 &ctx
->image_views
[shader_type
][start_slot
+ i
],
412 zink_set_stencil_ref(struct pipe_context
*pctx
,
413 const struct pipe_stencil_ref
*ref
)
415 struct zink_context
*ctx
= zink_context(pctx
);
416 ctx
->stencil_ref
[0] = ref
->ref_value
[0];
417 ctx
->stencil_ref
[1] = ref
->ref_value
[1];
/* pipe_context::set_clip_state — not implemented yet. */
static void
zink_set_clip_state(struct pipe_context *pctx,
                    const struct pipe_clip_state *pcs)
{
}
426 static struct zink_render_pass
*
427 get_render_pass(struct zink_context
*ctx
,
428 const struct pipe_framebuffer_state
*fb
)
430 struct zink_render_pass_state state
;
432 for (int i
= 0; i
< fb
->nr_cbufs
; i
++) {
433 struct zink_resource
*cbuf
= zink_resource(fb
->cbufs
[i
]->texture
);
434 state
.rts
[i
].format
= cbuf
->format
;
436 state
.num_cbufs
= fb
->nr_cbufs
;
439 struct zink_resource
*zsbuf
= zink_resource(fb
->zsbuf
->texture
);
440 state
.rts
[fb
->nr_cbufs
].format
= zsbuf
->format
;
442 state
.have_zsbuf
= fb
->zsbuf
!= NULL
;
444 // TODO: cache instead!
445 return zink_create_render_pass(zink_screen(ctx
->base
.screen
), &state
);
448 static struct zink_framebuffer
*
449 get_framebuffer(struct zink_context
*ctx
,
450 const struct pipe_framebuffer_state
*fb
,
451 struct zink_render_pass
*rp
)
454 return zink_create_framebuffer(zink_screen(ctx
->base
.screen
), fb
, rp
);
458 zink_set_framebuffer_state(struct pipe_context
*pctx
,
459 const struct pipe_framebuffer_state
*state
)
461 struct zink_context
*ctx
= zink_context(pctx
);
462 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
464 struct zink_render_pass
*rp
= get_render_pass(ctx
, state
);
465 zink_render_pass_reference(screen
, &ctx
->render_pass
, rp
);
467 struct zink_framebuffer
*fb
= get_framebuffer(ctx
, state
, rp
);
468 zink_framebuffer_reference(screen
, &ctx
->framebuffer
, fb
);
469 zink_framebuffer_reference(screen
, &fb
, NULL
);
470 zink_render_pass_reference(screen
, &rp
, NULL
);
472 ctx
->gfx_pipeline_state
.num_attachments
= state
->nr_cbufs
;
474 util_copy_framebuffer_state(&ctx
->fb_state
, state
);
476 struct zink_cmdbuf
*cmdbuf
= zink_start_cmdbuf(ctx
);
480 for (int i
= 0; i
< state
->nr_cbufs
; i
++) {
481 struct zink_resource
*res
= zink_resource(state
->cbufs
[i
]->texture
);
482 if (res
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
483 res
->layout
!= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
)
484 zink_resource_barrier(cmdbuf
->cmdbuf
, res
, res
->aspect
,
485 VK_IMAGE_LAYOUT_GENERAL
);
489 struct zink_resource
*res
= zink_resource(state
->zsbuf
->texture
);
490 if (res
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
491 res
->layout
!= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
)
492 zink_resource_barrier(cmdbuf
->cmdbuf
, res
, res
->aspect
,
493 VK_IMAGE_LAYOUT_GENERAL
);
496 zink_end_cmdbuf(ctx
, cmdbuf
);
500 zink_set_active_query_state(struct pipe_context
*pctx
, bool enable
)
505 zink_set_blend_color(struct pipe_context
*pctx
,
506 const struct pipe_blend_color
*color
)
508 struct zink_context
*ctx
= zink_context(pctx
);
509 memcpy(ctx
->blend_constants
, color
->color
, sizeof(float) * 4);
513 access_flags(VkImageLayout layout
)
516 case VK_IMAGE_LAYOUT_UNDEFINED
:
517 case VK_IMAGE_LAYOUT_GENERAL
:
520 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
:
521 return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
;
522 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
:
523 return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
;
525 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
:
526 return VK_ACCESS_SHADER_READ_BIT
;
528 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
:
529 return VK_ACCESS_TRANSFER_READ_BIT
;
531 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
:
532 return VK_ACCESS_TRANSFER_WRITE_BIT
;
534 case VK_IMAGE_LAYOUT_PREINITIALIZED
:
535 return VK_ACCESS_HOST_WRITE_BIT
;
538 unreachable("unexpected layout");
543 zink_resource_barrier(VkCommandBuffer cmdbuf
, struct zink_resource
*res
,
544 VkImageAspectFlags aspect
, VkImageLayout new_layout
)
546 VkImageSubresourceRange isr
= {
548 0, VK_REMAINING_MIP_LEVELS
,
549 0, VK_REMAINING_ARRAY_LAYERS
552 VkImageMemoryBarrier imb
= {
553 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER
,
555 access_flags(res
->layout
),
556 access_flags(new_layout
),
559 VK_QUEUE_FAMILY_IGNORED
,
560 VK_QUEUE_FAMILY_IGNORED
,
564 vkCmdPipelineBarrier(
566 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
,
567 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
,
574 res
->layout
= new_layout
;
578 zink_clear(struct pipe_context
*pctx
,
580 const union pipe_color_union
*pcolor
,
581 double depth
, unsigned stencil
)
583 struct zink_context
*ctx
= zink_context(pctx
);
584 struct pipe_framebuffer_state
*fb
= &ctx
->fb_state
;
586 struct zink_cmdbuf
*cmdbuf
= zink_start_cmdbuf(ctx
);
590 // first transition all images to a compatible layout
591 if (buffers
& PIPE_CLEAR_COLOR
) {
592 for (unsigned i
= 0; i
< fb
->nr_cbufs
; i
++) {
593 if (!(buffers
& (PIPE_CLEAR_COLOR0
<< i
)) || !fb
->cbufs
[i
])
596 struct zink_resource
*cbuf
= zink_resource(fb
->cbufs
[i
]->texture
);
598 if (cbuf
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
599 cbuf
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
)
600 zink_resource_barrier(cmdbuf
->cmdbuf
, cbuf
, cbuf
->aspect
,
601 VK_IMAGE_LAYOUT_GENERAL
);
605 VkImageAspectFlags depthStencilAspect
= 0;
606 if (buffers
& PIPE_CLEAR_DEPTHSTENCIL
&& fb
->zsbuf
) {
607 struct zink_resource
*zsbuf
= zink_resource(fb
->zsbuf
->texture
);
608 if (buffers
& PIPE_CLEAR_DEPTH
)
609 depthStencilAspect
|= VK_IMAGE_ASPECT_DEPTH_BIT
;
610 if (buffers
& PIPE_CLEAR_STENCIL
)
611 depthStencilAspect
|= VK_IMAGE_ASPECT_STENCIL_BIT
;
613 if (zsbuf
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
614 zsbuf
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
)
615 zink_resource_barrier(cmdbuf
->cmdbuf
, zsbuf
, depthStencilAspect
,
616 VK_IMAGE_LAYOUT_GENERAL
);
619 VkClearColorValue color
;
620 color
.float32
[0] = pcolor
->f
[0];
621 color
.float32
[1] = pcolor
->f
[1];
622 color
.float32
[2] = pcolor
->f
[2];
623 color
.float32
[3] = pcolor
->f
[3];
625 if (buffers
& PIPE_CLEAR_COLOR
) {
626 for (unsigned i
= 0; i
< fb
->nr_cbufs
; i
++) {
627 if (!(buffers
& (PIPE_CLEAR_COLOR0
<< i
)) || !fb
->cbufs
[i
])
630 struct zink_resource
*cbuf
= zink_resource(fb
->cbufs
[i
]->texture
);
632 VkImageSubresourceRange range
;
633 range
.aspectMask
= cbuf
->aspect
;
634 range
.baseMipLevel
= 0;
635 range
.levelCount
= VK_REMAINING_MIP_LEVELS
;
636 range
.baseArrayLayer
= 0;
637 range
.layerCount
= VK_REMAINING_ARRAY_LAYERS
;
638 vkCmdClearColorImage(cmdbuf
->cmdbuf
,
639 cbuf
->image
, VK_IMAGE_LAYOUT_GENERAL
,
645 if (depthStencilAspect
) {
646 struct zink_resource
*zsbuf
= zink_resource(fb
->zsbuf
->texture
);
648 VkClearDepthStencilValue zsvalue
= { depth
, stencil
};
650 VkImageSubresourceRange range
;
651 range
.aspectMask
= depthStencilAspect
;
652 range
.baseMipLevel
= 0;
653 range
.levelCount
= VK_REMAINING_MIP_LEVELS
;
654 range
.baseArrayLayer
= 0;
655 range
.layerCount
= VK_REMAINING_ARRAY_LAYERS
;
657 vkCmdClearDepthStencilImage(cmdbuf
->cmdbuf
,
658 zsbuf
->image
, VK_IMAGE_LAYOUT_GENERAL
,
663 zink_end_cmdbuf(ctx
, cmdbuf
);
666 VkShaderStageFlagBits
667 zink_shader_stage(enum pipe_shader_type type
)
669 VkShaderStageFlagBits stages
[] = {
670 [PIPE_SHADER_VERTEX
] = VK_SHADER_STAGE_VERTEX_BIT
,
671 [PIPE_SHADER_FRAGMENT
] = VK_SHADER_STAGE_FRAGMENT_BIT
,
672 [PIPE_SHADER_GEOMETRY
] = VK_SHADER_STAGE_GEOMETRY_BIT
,
673 [PIPE_SHADER_TESS_CTRL
] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT
,
674 [PIPE_SHADER_TESS_EVAL
] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT
,
675 [PIPE_SHADER_COMPUTE
] = VK_SHADER_STAGE_COMPUTE_BIT
,
680 static VkDescriptorSet
681 allocate_descriptor_set(struct zink_context
*ctx
, VkDescriptorSetLayout dsl
)
683 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
684 VkDescriptorSetAllocateInfo dsai
;
685 memset((void *)&dsai
, 0, sizeof(dsai
));
686 dsai
.sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
;
688 dsai
.descriptorPool
= ctx
->descpool
;
689 dsai
.descriptorSetCount
= 1;
690 dsai
.pSetLayouts
= &dsl
;
692 VkDescriptorSet desc_set
;
693 if (vkAllocateDescriptorSets(screen
->dev
, &dsai
, &desc_set
) != VK_SUCCESS
) {
694 if (vkResetDescriptorPool(screen
->dev
, ctx
->descpool
, 0) != VK_SUCCESS
) {
695 fprintf(stderr
, "vkResetDescriptorPool failed\n");
696 return VK_NULL_HANDLE
;
698 if (vkAllocateDescriptorSets(screen
->dev
, &dsai
, &desc_set
) != VK_SUCCESS
) {
699 fprintf(stderr
, "vkAllocateDescriptorSets failed\n");
700 return VK_NULL_HANDLE
;
707 static VkPrimitiveTopology
708 zink_primitive_topology(enum pipe_prim_type mode
)
711 case PIPE_PRIM_POINTS
:
712 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST
;
714 case PIPE_PRIM_LINES
:
715 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST
;
717 case PIPE_PRIM_LINE_STRIP
:
718 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP
;
720 case PIPE_PRIM_TRIANGLES
:
721 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST
;
723 case PIPE_PRIM_TRIANGLE_STRIP
:
724 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
;
726 case PIPE_PRIM_TRIANGLE_FAN
:
727 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN
;
730 unreachable("unexpected enum pipe_prim_type");
735 zink_bind_vertex_buffers(VkCommandBuffer cmdbuf
, struct zink_context
*ctx
)
737 VkBuffer buffers
[PIPE_MAX_ATTRIBS
];
738 VkDeviceSize buffer_offsets
[PIPE_MAX_ATTRIBS
];
739 struct zink_vertex_elements_state
*elems
= ctx
->gfx_pipeline_state
.element_state
;
740 for (unsigned i
= 0; i
< elems
->num_bindings
; i
++) {
741 struct pipe_vertex_buffer
*vb
= ctx
->buffers
+ elems
->binding_map
[i
];
742 assert(vb
&& vb
->buffer
.resource
);
743 struct zink_resource
*res
= zink_resource(vb
->buffer
.resource
);
744 buffers
[i
] = res
->buffer
;
745 buffer_offsets
[i
] = vb
->buffer_offset
;
748 if (elems
->num_bindings
> 0)
749 vkCmdBindVertexBuffers(cmdbuf
, 0, elems
->num_bindings
, buffers
, buffer_offsets
);
753 begin_render_pass(struct zink_cmdbuf
*cmdbuf
, struct zink_render_pass
*rp
,
754 struct zink_framebuffer
*fb
, unsigned width
, unsigned height
)
756 VkRenderPassBeginInfo rpbi
= {};
757 rpbi
.sType
= VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
;
758 rpbi
.renderPass
= rp
->render_pass
;
759 rpbi
.renderArea
.offset
.x
= 0;
760 rpbi
.renderArea
.offset
.y
= 0;
761 rpbi
.renderArea
.extent
.width
= width
;
762 rpbi
.renderArea
.extent
.height
= height
;
763 rpbi
.clearValueCount
= 0;
764 rpbi
.pClearValues
= NULL
;
765 rpbi
.framebuffer
= fb
->fb
;
767 vkCmdBeginRenderPass(cmdbuf
->cmdbuf
, &rpbi
, VK_SUBPASS_CONTENTS_INLINE
);
771 zink_draw_vbo(struct pipe_context
*pctx
,
772 const struct pipe_draw_info
*dinfo
)
774 struct zink_context
*ctx
= zink_context(pctx
);
775 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
776 struct zink_rasterizer_state
*rast_state
= ctx
->gfx_pipeline_state
.rast_state
;
778 if (dinfo
->mode
>= PIPE_PRIM_QUADS
||
779 dinfo
->mode
== PIPE_PRIM_LINE_LOOP
) {
780 if (!u_trim_pipe_prim(dinfo
->mode
, (unsigned *)&dinfo
->count
))
783 util_primconvert_save_rasterizer_state(ctx
->primconvert
, &rast_state
->base
);
784 util_primconvert_draw_vbo(ctx
->primconvert
, dinfo
);
788 struct zink_gfx_program
*gfx_program
= zink_create_gfx_program(screen
->dev
,
793 ctx
->gfx_pipeline_state
.primitive_topology
= zink_primitive_topology(dinfo
->mode
);
795 VkPipeline pipeline
= zink_create_gfx_pipeline(screen
->dev
,
797 &ctx
->gfx_pipeline_state
,
798 ctx
->render_pass
->render_pass
);
800 bool depth_bias
= false;
801 switch (u_reduced_prim(dinfo
->mode
)) {
802 case PIPE_PRIM_POINTS
:
803 depth_bias
= rast_state
->offset_point
;
806 case PIPE_PRIM_LINES
:
807 depth_bias
= rast_state
->offset_line
;
810 case PIPE_PRIM_TRIANGLES
:
811 depth_bias
= rast_state
->offset_tri
;
815 unreachable("unexpected reduced prim");
818 unsigned index_offset
= 0;
819 struct pipe_resource
*index_buffer
= NULL
;
820 if (dinfo
->index_size
> 0) {
821 if (dinfo
->has_user_indices
) {
822 if (!util_upload_index_buffer(pctx
, dinfo
, &index_buffer
, &index_offset
)) {
823 debug_printf("util_upload_index_buffer() failed\n");
827 index_buffer
= dinfo
->index
.resource
;
830 struct zink_cmdbuf
*cmdbuf
= zink_start_cmdbuf(ctx
);
834 begin_render_pass(cmdbuf
, ctx
->render_pass
, ctx
->framebuffer
,
835 ctx
->fb_state
.width
, ctx
->fb_state
.height
);
837 vkCmdSetViewport(cmdbuf
->cmdbuf
, 0, ctx
->num_viewports
, ctx
->viewports
);
839 if (ctx
->num_scissors
)
840 vkCmdSetScissor(cmdbuf
->cmdbuf
, 0, ctx
->num_scissors
, ctx
->scissors
);
841 else if (ctx
->fb_state
.width
&& ctx
->fb_state
.height
) {
842 VkRect2D fb_scissor
= {};
843 fb_scissor
.extent
.width
= ctx
->fb_state
.width
;
844 fb_scissor
.extent
.height
= ctx
->fb_state
.height
;
845 vkCmdSetScissor(cmdbuf
->cmdbuf
, 0, 1, &fb_scissor
);
848 vkCmdSetStencilReference(cmdbuf
->cmdbuf
, VK_STENCIL_FACE_FRONT_BIT
, ctx
->stencil_ref
[0]);
849 vkCmdSetStencilReference(cmdbuf
->cmdbuf
, VK_STENCIL_FACE_BACK_BIT
, ctx
->stencil_ref
[1]);
852 vkCmdSetDepthBias(cmdbuf
->cmdbuf
, rast_state
->offset_units
, rast_state
->offset_clamp
, rast_state
->offset_scale
);
854 vkCmdSetDepthBias(cmdbuf
->cmdbuf
, 0.0f
, 0.0f
, 0.0f
);
856 if (ctx
->gfx_pipeline_state
.blend_state
->need_blend_constants
)
857 vkCmdSetBlendConstants(cmdbuf
->cmdbuf
, ctx
->blend_constants
);
859 VkDescriptorSet desc_set
= allocate_descriptor_set(ctx
, gfx_program
->dsl
);
861 VkWriteDescriptorSet wds
[PIPE_SHADER_TYPES
* PIPE_MAX_CONSTANT_BUFFERS
+ PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
862 VkDescriptorBufferInfo buffer_infos
[PIPE_SHADER_TYPES
* PIPE_MAX_CONSTANT_BUFFERS
];
863 VkDescriptorImageInfo image_infos
[PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
864 int num_wds
= 0, num_buffer_info
= 0, num_image_info
= 0;
866 for (int i
= 0; i
< ARRAY_SIZE(ctx
->gfx_stages
); i
++) {
867 struct zink_shader
*shader
= ctx
->gfx_stages
[i
];
871 for (int j
= 0; j
< shader
->num_bindings
; j
++) {
872 int index
= shader
->bindings
[j
].index
;
873 if (shader
->bindings
[j
].type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
) {
874 assert(ctx
->ubos
[i
][index
].buffer_size
> 0);
875 assert(ctx
->ubos
[i
][index
].buffer
);
876 buffer_infos
[num_buffer_info
].buffer
= zink_resource(ctx
->ubos
[i
][index
].buffer
)->buffer
;
877 buffer_infos
[num_buffer_info
].offset
= ctx
->ubos
[i
][index
].buffer_offset
;
878 buffer_infos
[num_buffer_info
].range
= VK_WHOLE_SIZE
;
879 wds
[num_wds
].pBufferInfo
= buffer_infos
+ num_buffer_info
;
882 struct pipe_sampler_view
*psampler_view
= ctx
->image_views
[i
][index
];
883 assert(psampler_view
);
884 struct zink_sampler_view
*sampler_view
= (struct zink_sampler_view
*)psampler_view
;
885 struct zink_resource
*resource
= zink_resource(psampler_view
->texture
);
886 image_infos
[num_image_info
].imageLayout
= resource
->layout
;
887 image_infos
[num_image_info
].imageView
= sampler_view
->image_view
;
888 image_infos
[num_image_info
].sampler
= ctx
->samplers
[i
][index
];
889 wds
[num_wds
].pImageInfo
= image_infos
+ num_image_info
;
893 wds
[num_wds
].sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
;
894 wds
[num_wds
].pNext
= NULL
;
895 wds
[num_wds
].dstSet
= desc_set
;
896 wds
[num_wds
].dstBinding
= shader
->bindings
[j
].binding
;
897 wds
[num_wds
].dstArrayElement
= 0;
898 wds
[num_wds
].descriptorCount
= 1;
899 wds
[num_wds
].descriptorType
= shader
->bindings
[j
].type
;
904 vkUpdateDescriptorSets(screen
->dev
, num_wds
, wds
, 0, NULL
);
906 vkCmdBindPipeline(cmdbuf
->cmdbuf
, VK_PIPELINE_BIND_POINT_GRAPHICS
, pipeline
);
907 vkCmdBindDescriptorSets(cmdbuf
->cmdbuf
, VK_PIPELINE_BIND_POINT_GRAPHICS
,
908 gfx_program
->layout
, 0, 1, &desc_set
, 0, NULL
);
909 zink_bind_vertex_buffers(cmdbuf
->cmdbuf
, ctx
);
911 if (dinfo
->index_size
> 0) {
912 assert(dinfo
->index_size
!= 1);
913 VkIndexType index_type
= dinfo
->index_size
== 2 ? VK_INDEX_TYPE_UINT16
: VK_INDEX_TYPE_UINT32
;
914 vkCmdBindIndexBuffer(cmdbuf
->cmdbuf
, zink_resource(index_buffer
)->buffer
, index_offset
, index_type
);
915 vkCmdDrawIndexed(cmdbuf
->cmdbuf
,
916 dinfo
->count
, dinfo
->instance_count
,
917 dinfo
->start
, dinfo
->index_bias
, dinfo
->start_instance
);
919 vkCmdDraw(cmdbuf
->cmdbuf
, dinfo
->count
, dinfo
->instance_count
, dinfo
->start
, dinfo
->start_instance
);
921 vkCmdEndRenderPass(cmdbuf
->cmdbuf
);
923 zink_end_cmdbuf(ctx
, cmdbuf
);
925 vkDestroyPipeline(screen
->dev
, pipeline
, NULL
);
927 if (dinfo
->index_size
> 0 && dinfo
->has_user_indices
)
928 pipe_resource_reference(&index_buffer
, NULL
);
932 zink_flush(struct pipe_context
*pctx
,
933 struct pipe_fence_handle
**pfence
,
934 enum pipe_flush_flags flags
)
939 zink_blit(struct pipe_context
*pctx
,
940 const struct pipe_blit_info
*info
)
942 struct zink_context
*ctx
= zink_context(pctx
);
943 bool is_resolve
= false;
944 if (info
->mask
!= PIPE_MASK_RGBA
||
945 info
->scissor_enable
||
947 if (!util_blitter_is_blit_supported(ctx
->blitter
, info
)) {
948 debug_printf("blit unsupported %s -> %s\n",
949 util_format_short_name(info
->src
.resource
->format
),
950 util_format_short_name(info
->dst
.resource
->format
));
954 util_blitter_save_fragment_constant_buffer_slot(ctx
->blitter
, ctx
->ubos
[PIPE_SHADER_FRAGMENT
]);
955 util_blitter_save_vertex_buffer_slot(ctx
->blitter
, ctx
->buffers
);
956 util_blitter_save_vertex_shader(ctx
->blitter
, ctx
->gfx_stages
[PIPE_SHADER_VERTEX
]);
957 util_blitter_save_fragment_shader(ctx
->blitter
, ctx
->gfx_stages
[PIPE_SHADER_FRAGMENT
]);
958 util_blitter_save_rasterizer(ctx
->blitter
, ctx
->gfx_pipeline_state
.rast_state
);
960 util_blitter_blit(ctx
->blitter
, info
);
963 struct zink_resource
*src
= zink_resource(info
->src
.resource
);
964 struct zink_resource
*dst
= zink_resource(info
->dst
.resource
);
966 if (src
->base
.nr_samples
> 1 && dst
->base
.nr_samples
<= 1)
969 struct zink_cmdbuf
*cmdbuf
= zink_start_cmdbuf(ctx
);
974 VkImageResolve region
= {};
976 region
.srcSubresource
.aspectMask
= src
->aspect
;
977 region
.srcSubresource
.mipLevel
= info
->src
.level
;
978 region
.srcSubresource
.baseArrayLayer
= 0; // no clue
979 region
.srcSubresource
.layerCount
= 1; // no clue
980 region
.srcOffset
.x
= info
->src
.box
.x
;
981 region
.srcOffset
.y
= info
->src
.box
.y
;
982 region
.srcOffset
.z
= info
->src
.box
.z
;
984 region
.dstSubresource
.aspectMask
= dst
->aspect
;
985 region
.dstSubresource
.mipLevel
= info
->dst
.level
;
986 region
.dstSubresource
.baseArrayLayer
= 0; // no clue
987 region
.dstSubresource
.layerCount
= 1; // no clue
988 region
.dstOffset
.x
= info
->dst
.box
.x
;
989 region
.dstOffset
.y
= info
->dst
.box
.y
;
990 region
.dstOffset
.z
= info
->dst
.box
.z
;
992 region
.extent
.width
= info
->dst
.box
.width
;
993 region
.extent
.height
= info
->dst
.box
.height
;
994 region
.extent
.depth
= info
->dst
.box
.depth
;
995 vkCmdResolveImage(cmdbuf
->cmdbuf
, src
->image
, src
->layout
,
996 dst
->image
, dst
->layout
,
1000 if (dst
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
1001 dst
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
)
1002 zink_resource_barrier(cmdbuf
->cmdbuf
, dst
, dst
->aspect
,
1003 VK_IMAGE_LAYOUT_GENERAL
);
1005 VkImageBlit region
= {};
1006 region
.srcSubresource
.aspectMask
= src
->aspect
;
1007 region
.srcSubresource
.mipLevel
= info
->src
.level
;
1008 region
.srcOffsets
[0].x
= info
->src
.box
.x
;
1009 region
.srcOffsets
[0].y
= info
->src
.box
.y
;
1010 region
.srcOffsets
[1].x
= info
->src
.box
.x
+ info
->src
.box
.width
;
1011 region
.srcOffsets
[1].y
= info
->src
.box
.y
+ info
->src
.box
.height
;
1013 if (src
->base
.array_size
> 1) {
1014 region
.srcOffsets
[0].z
= 0;
1015 region
.srcOffsets
[1].z
= 1;
1016 region
.srcSubresource
.baseArrayLayer
= info
->src
.box
.z
;
1017 region
.srcSubresource
.layerCount
= info
->src
.box
.depth
;
1019 region
.srcOffsets
[0].z
= info
->src
.box
.z
;
1020 region
.srcOffsets
[1].z
= info
->src
.box
.z
+ info
->src
.box
.depth
;
1021 region
.srcSubresource
.baseArrayLayer
= 0;
1022 region
.srcSubresource
.layerCount
= 1;
1025 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1026 region
.dstSubresource
.mipLevel
= info
->dst
.level
;
1027 region
.dstOffsets
[0].x
= info
->dst
.box
.x
;
1028 region
.dstOffsets
[0].y
= info
->dst
.box
.y
;
1029 region
.dstOffsets
[1].x
= info
->dst
.box
.x
+ info
->dst
.box
.width
;
1030 region
.dstOffsets
[1].y
= info
->dst
.box
.y
+ info
->dst
.box
.height
;
1032 if (dst
->base
.array_size
> 1) {
1033 region
.dstOffsets
[0].z
= 0;
1034 region
.dstOffsets
[1].z
= 1;
1035 region
.dstSubresource
.baseArrayLayer
= info
->dst
.box
.z
;
1036 region
.dstSubresource
.layerCount
= info
->dst
.box
.depth
;
1038 region
.dstOffsets
[0].z
= info
->dst
.box
.z
;
1039 region
.dstOffsets
[1].z
= info
->dst
.box
.z
+ info
->dst
.box
.depth
;
1040 region
.dstSubresource
.baseArrayLayer
= 0;
1041 region
.dstSubresource
.layerCount
= 1;
1044 vkCmdBlitImage(cmdbuf
->cmdbuf
, src
->image
, src
->layout
,
1045 dst
->image
, dst
->layout
,
1047 filter(info
->filter
));
1049 zink_end_cmdbuf(ctx
, cmdbuf
);
/* pipe_context::flush_resource hook.
 * NOTE(review): the function body is not visible in this chunk; it appears
 * to be a no-op stub in this early zink code — TODO confirm against the
 * full file before relying on any flushing behavior here. */
1053 zink_flush_resource(struct pipe_context
*pipe
,
1054 struct pipe_resource
*resource
)
1059 zink_resource_copy_region(struct pipe_context
*pctx
,
1060 struct pipe_resource
*pdst
,
1061 unsigned dst_level
, unsigned dstx
, unsigned dsty
, unsigned dstz
,
1062 struct pipe_resource
*psrc
,
1063 unsigned src_level
, const struct pipe_box
*src_box
)
1065 struct zink_resource
*dst
= zink_resource(pdst
);
1066 struct zink_resource
*src
= zink_resource(psrc
);
1067 struct zink_context
*ctx
= zink_context(pctx
);
1068 if (dst
->base
.target
!= PIPE_BUFFER
&& src
->base
.target
!= PIPE_BUFFER
) {
1069 VkImageCopy region
= {};
1071 region
.srcSubresource
.aspectMask
= src
->aspect
;
1072 region
.srcSubresource
.mipLevel
= src_level
;
1073 region
.srcSubresource
.layerCount
= 1;
1074 if (src
->base
.array_size
> 1) {
1075 region
.srcSubresource
.baseArrayLayer
= src_box
->z
;
1076 region
.srcSubresource
.layerCount
= src_box
->depth
;
1077 region
.extent
.depth
= 1;
1079 region
.srcOffset
.z
= src_box
->z
;
1080 region
.srcSubresource
.layerCount
= 1;
1081 region
.extent
.depth
= src_box
->depth
;
1084 region
.srcOffset
.x
= src_box
->x
;
1085 region
.srcOffset
.y
= src_box
->y
;
1087 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1088 region
.dstSubresource
.mipLevel
= dst_level
;
1089 if (dst
->base
.array_size
> 1) {
1090 region
.dstSubresource
.baseArrayLayer
= dstz
;
1091 region
.dstSubresource
.layerCount
= src_box
->depth
;
1093 region
.dstOffset
.z
= dstz
;
1094 region
.dstSubresource
.layerCount
= 1;
1097 region
.dstOffset
.x
= dstx
;
1098 region
.dstOffset
.y
= dsty
;
1099 region
.extent
.width
= src_box
->width
;
1100 region
.extent
.height
= src_box
->height
;
1102 struct zink_cmdbuf
*cmdbuf
= zink_start_cmdbuf(ctx
);
1106 vkCmdCopyImage(cmdbuf
->cmdbuf
, src
->image
, src
->layout
,
1107 dst
->image
, dst
->layout
,
1109 zink_end_cmdbuf(ctx
, cmdbuf
);
1111 debug_printf("zink: TODO resource copy\n");
1114 struct pipe_context
*
1115 zink_context_create(struct pipe_screen
*pscreen
, void *priv
, unsigned flags
)
1117 struct zink_screen
*screen
= zink_screen(pscreen
);
1118 struct zink_context
*ctx
= CALLOC_STRUCT(zink_context
);
1120 ctx
->base
.screen
= pscreen
;
1121 ctx
->base
.priv
= priv
;
1123 ctx
->base
.destroy
= zink_context_destroy
;
1125 zink_context_state_init(&ctx
->base
);
1127 ctx
->base
.create_sampler_state
= zink_create_sampler_state
;
1128 ctx
->base
.bind_sampler_states
= zink_bind_sampler_states
;
1129 ctx
->base
.delete_sampler_state
= zink_delete_sampler_state
;
1131 ctx
->base
.create_sampler_view
= zink_create_sampler_view
;
1132 ctx
->base
.set_sampler_views
= zink_set_sampler_views
;
1133 ctx
->base
.sampler_view_destroy
= zink_destroy_sampler_view
;
1135 ctx
->base
.create_vs_state
= zink_create_vs_state
;
1136 ctx
->base
.bind_vs_state
= zink_bind_vs_state
;
1137 ctx
->base
.delete_vs_state
= zink_delete_vs_state
;
1139 ctx
->base
.create_fs_state
= zink_create_fs_state
;
1140 ctx
->base
.bind_fs_state
= zink_bind_fs_state
;
1141 ctx
->base
.delete_fs_state
= zink_delete_fs_state
;
1143 ctx
->base
.set_polygon_stipple
= zink_set_polygon_stipple
;
1144 ctx
->base
.set_vertex_buffers
= zink_set_vertex_buffers
;
1145 ctx
->base
.set_viewport_states
= zink_set_viewport_states
;
1146 ctx
->base
.set_scissor_states
= zink_set_scissor_states
;
1147 ctx
->base
.set_constant_buffer
= zink_set_constant_buffer
;
1148 ctx
->base
.set_framebuffer_state
= zink_set_framebuffer_state
;
1149 ctx
->base
.set_stencil_ref
= zink_set_stencil_ref
;
1150 ctx
->base
.set_clip_state
= zink_set_clip_state
;
1151 ctx
->base
.set_active_query_state
= zink_set_active_query_state
;
1152 ctx
->base
.set_blend_color
= zink_set_blend_color
;
1154 ctx
->base
.clear
= zink_clear
;
1155 ctx
->base
.draw_vbo
= zink_draw_vbo
;
1156 ctx
->base
.flush
= zink_flush
;
1158 ctx
->base
.resource_copy_region
= zink_resource_copy_region
;
1159 ctx
->base
.blit
= zink_blit
;
1161 ctx
->base
.flush_resource
= zink_flush_resource
;
1162 zink_context_surface_init(&ctx
->base
);
1163 zink_context_resource_init(&ctx
->base
);
1164 zink_context_query_init(&ctx
->base
);
1166 slab_create_child(&ctx
->transfer_pool
, &screen
->transfer_pool
);
1168 ctx
->base
.stream_uploader
= u_upload_create_default(&ctx
->base
);
1169 ctx
->base
.const_uploader
= ctx
->base
.stream_uploader
;
1171 int prim_hwsupport
= 1 << PIPE_PRIM_POINTS
|
1172 1 << PIPE_PRIM_LINES
|
1173 1 << PIPE_PRIM_LINE_STRIP
|
1174 1 << PIPE_PRIM_TRIANGLES
|
1175 1 << PIPE_PRIM_TRIANGLE_STRIP
|
1176 1 << PIPE_PRIM_TRIANGLE_FAN
;
1178 ctx
->primconvert
= util_primconvert_create(&ctx
->base
, prim_hwsupport
);
1179 if (!ctx
->primconvert
)
1182 ctx
->blitter
= util_blitter_create(&ctx
->base
);
1186 VkCommandPoolCreateInfo cpci
= {};
1187 cpci
.sType
= VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO
;
1188 cpci
.queueFamilyIndex
= screen
->gfx_queue
;
1189 cpci
.flags
= VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
;
1190 if (vkCreateCommandPool(screen
->dev
, &cpci
, NULL
, &ctx
->cmdpool
) != VK_SUCCESS
)
1193 VkCommandBufferAllocateInfo cbai
= {};
1194 cbai
.sType
= VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
;
1195 cbai
.commandPool
= ctx
->cmdpool
;
1196 cbai
.level
= VK_COMMAND_BUFFER_LEVEL_PRIMARY
;
1197 cbai
.commandBufferCount
= 1;
1198 if (vkAllocateCommandBuffers(screen
->dev
, &cbai
, &ctx
->cmdbuf
.cmdbuf
) != VK_SUCCESS
)
1201 VkDescriptorPoolSize sizes
[] = {
1202 {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
, 1000}
1204 VkDescriptorPoolCreateInfo dpci
= {};
1205 dpci
.sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
;
1206 dpci
.pPoolSizes
= sizes
;
1207 dpci
.poolSizeCount
= ARRAY_SIZE(sizes
);
1208 dpci
.flags
= VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
;
1209 dpci
.maxSets
= 1000;
1211 if(vkCreateDescriptorPool(screen
->dev
, &dpci
, 0, &ctx
->descpool
) != VK_SUCCESS
)
1214 vkGetDeviceQueue(screen
->dev
, screen
->gfx_queue
, 0, &ctx
->queue
);
1220 vkDestroyCommandPool(screen
->dev
, ctx
->cmdpool
, NULL
);