2 * Copyright 2018 Collabora Ltd.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
24 #include "zink_context.h"
26 #include "zink_batch.h"
27 #include "zink_compiler.h"
28 #include "zink_fence.h"
29 #include "zink_framebuffer.h"
30 #include "zink_pipeline.h"
31 #include "zink_program.h"
32 #include "zink_render_pass.h"
33 #include "zink_resource.h"
34 #include "zink_screen.h"
35 #include "zink_state.h"
36 #include "zink_surface.h"
38 #include "indices/u_primconvert.h"
39 #include "util/u_blitter.h"
40 #include "util/u_debug.h"
41 #include "util/u_format.h"
42 #include "util/u_framebuffer.h"
43 #include "util/u_helpers.h"
44 #include "util/u_inlines.h"
48 #include "util/u_memory.h"
49 #include "util/u_prim.h"
50 #include "util/u_upload_mgr.h"
53 zink_context_destroy(struct pipe_context
*pctx
)
55 struct zink_context
*ctx
= zink_context(pctx
);
56 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
58 if (vkQueueWaitIdle(ctx
->queue
) != VK_SUCCESS
)
59 debug_printf("vkQueueWaitIdle failed\n");
61 for (int i
= 0; i
< ARRAY_SIZE(ctx
->batches
); ++i
)
62 vkFreeCommandBuffers(screen
->dev
, ctx
->cmdpool
, 1, &ctx
->batches
[i
].cmdbuf
);
63 vkDestroyCommandPool(screen
->dev
, ctx
->cmdpool
, NULL
);
65 util_primconvert_destroy(ctx
->primconvert
);
66 u_upload_destroy(pctx
->stream_uploader
);
67 slab_destroy_child(&ctx
->transfer_pool
);
68 util_blitter_destroy(ctx
->blitter
);
73 filter(enum pipe_tex_filter filter
)
76 case PIPE_TEX_FILTER_NEAREST
: return VK_FILTER_NEAREST
;
77 case PIPE_TEX_FILTER_LINEAR
: return VK_FILTER_LINEAR
;
79 unreachable("unexpected filter");
82 static VkSamplerMipmapMode
83 sampler_mipmap_mode(enum pipe_tex_mipfilter filter
)
86 case PIPE_TEX_MIPFILTER_NEAREST
: return VK_SAMPLER_MIPMAP_MODE_NEAREST
;
87 case PIPE_TEX_MIPFILTER_LINEAR
: return VK_SAMPLER_MIPMAP_MODE_LINEAR
;
88 case PIPE_TEX_MIPFILTER_NONE
:
89 unreachable("PIPE_TEX_MIPFILTER_NONE should be dealt with earlier");
91 unreachable("unexpected filter");
94 static VkSamplerAddressMode
95 sampler_address_mode(enum pipe_tex_wrap filter
)
98 case PIPE_TEX_WRAP_REPEAT
: return VK_SAMPLER_ADDRESS_MODE_REPEAT
;
99 case PIPE_TEX_WRAP_CLAMP
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
100 case PIPE_TEX_WRAP_CLAMP_TO_EDGE
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
;
101 case PIPE_TEX_WRAP_CLAMP_TO_BORDER
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
;
102 case PIPE_TEX_WRAP_MIRROR_REPEAT
: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT
;
103 case PIPE_TEX_WRAP_MIRROR_CLAMP
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
104 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
;
105 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
107 unreachable("unexpected wrap");
111 zink_create_sampler_state(struct pipe_context
*pctx
,
112 const struct pipe_sampler_state
*state
)
114 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
116 VkSamplerCreateInfo sci
= {};
117 sci
.sType
= VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
;
118 sci
.magFilter
= filter(state
->mag_img_filter
);
119 sci
.minFilter
= filter(state
->min_img_filter
);
121 if (state
->min_mip_filter
!= PIPE_TEX_MIPFILTER_NONE
) {
122 sci
.mipmapMode
= sampler_mipmap_mode(state
->min_mip_filter
);
123 sci
.minLod
= state
->min_lod
;
124 sci
.maxLod
= state
->max_lod
;
126 sci
.mipmapMode
= VK_SAMPLER_MIPMAP_MODE_NEAREST
;
131 sci
.addressModeU
= sampler_address_mode(state
->wrap_s
);
132 sci
.addressModeV
= sampler_address_mode(state
->wrap_t
);
133 sci
.addressModeW
= sampler_address_mode(state
->wrap_r
);
134 sci
.mipLodBias
= state
->lod_bias
;
135 sci
.compareOp
= VK_COMPARE_OP_NEVER
; // TODO
136 sci
.borderColor
= VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK
; // TODO
137 sci
.unnormalizedCoordinates
= !state
->normalized_coords
;
139 if (state
->max_anisotropy
> 1) {
140 sci
.maxAnisotropy
= state
->max_anisotropy
;
141 sci
.anisotropyEnable
= VK_TRUE
;
145 VkResult err
= vkCreateSampler(screen
->dev
, &sci
, NULL
, &sampler
);
146 if (err
!= VK_SUCCESS
)
153 zink_bind_sampler_states(struct pipe_context
*pctx
,
154 enum pipe_shader_type shader
,
156 unsigned num_samplers
,
159 struct zink_context
*ctx
= zink_context(pctx
);
160 for (unsigned i
= 0; i
< num_samplers
; ++i
)
161 ctx
->samplers
[shader
][start_slot
+ i
] = (VkSampler
)samplers
[i
];
165 zink_delete_sampler_state(struct pipe_context
*pctx
,
168 struct zink_batch
*batch
= zink_context_curr_batch(zink_context(pctx
));
169 util_dynarray_append(&batch
->zombie_samplers
,
170 VkSampler
, sampler_state
);
174 static VkImageViewType
175 image_view_type(enum pipe_texture_target target
)
178 case PIPE_TEXTURE_1D
: return VK_IMAGE_VIEW_TYPE_1D
;
179 case PIPE_TEXTURE_1D_ARRAY
: return VK_IMAGE_VIEW_TYPE_1D_ARRAY
;
180 case PIPE_TEXTURE_2D
: return VK_IMAGE_VIEW_TYPE_2D
;
181 case PIPE_TEXTURE_2D_ARRAY
: return VK_IMAGE_VIEW_TYPE_2D_ARRAY
;
182 case PIPE_TEXTURE_CUBE
: return VK_IMAGE_VIEW_TYPE_CUBE
;
183 case PIPE_TEXTURE_CUBE_ARRAY
: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
;
184 case PIPE_TEXTURE_3D
: return VK_IMAGE_VIEW_TYPE_3D
;
185 case PIPE_TEXTURE_RECT
: return VK_IMAGE_VIEW_TYPE_2D
; /* not sure */
187 unreachable("unexpected target");
191 static VkComponentSwizzle
192 component_mapping(enum pipe_swizzle swizzle
)
195 case PIPE_SWIZZLE_X
: return VK_COMPONENT_SWIZZLE_R
;
196 case PIPE_SWIZZLE_Y
: return VK_COMPONENT_SWIZZLE_G
;
197 case PIPE_SWIZZLE_Z
: return VK_COMPONENT_SWIZZLE_B
;
198 case PIPE_SWIZZLE_W
: return VK_COMPONENT_SWIZZLE_A
;
199 case PIPE_SWIZZLE_0
: return VK_COMPONENT_SWIZZLE_ZERO
;
200 case PIPE_SWIZZLE_1
: return VK_COMPONENT_SWIZZLE_ONE
;
201 case PIPE_SWIZZLE_NONE
: return VK_COMPONENT_SWIZZLE_IDENTITY
; // ???
203 unreachable("unexpected swizzle");
207 static struct pipe_sampler_view
*
208 zink_create_sampler_view(struct pipe_context
*pctx
, struct pipe_resource
*pres
,
209 const struct pipe_sampler_view
*state
)
211 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
212 struct zink_resource
*res
= zink_resource(pres
);
213 struct zink_sampler_view
*sampler_view
= CALLOC_STRUCT(zink_sampler_view
);
215 sampler_view
->base
= *state
;
216 sampler_view
->base
.texture
= NULL
;
217 pipe_resource_reference(&sampler_view
->base
.texture
, pres
);
218 sampler_view
->base
.reference
.count
= 1;
219 sampler_view
->base
.context
= pctx
;
221 VkImageViewCreateInfo ivci
= {};
222 ivci
.sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
;
223 ivci
.image
= res
->image
;
224 ivci
.viewType
= image_view_type(state
->target
);
225 ivci
.format
= zink_get_format(state
->format
);
226 ivci
.components
.r
= component_mapping(state
->swizzle_r
);
227 ivci
.components
.g
= component_mapping(state
->swizzle_g
);
228 ivci
.components
.b
= component_mapping(state
->swizzle_b
);
229 ivci
.components
.a
= component_mapping(state
->swizzle_a
);
230 ivci
.subresourceRange
.aspectMask
= zink_aspect_from_format(state
->format
);
231 ivci
.subresourceRange
.baseMipLevel
= state
->u
.tex
.first_level
;
232 ivci
.subresourceRange
.baseArrayLayer
= state
->u
.tex
.first_layer
;
233 ivci
.subresourceRange
.levelCount
= state
->u
.tex
.last_level
- state
->u
.tex
.first_level
+ 1;
234 ivci
.subresourceRange
.layerCount
= state
->u
.tex
.last_layer
- state
->u
.tex
.first_layer
+ 1;
236 VkResult err
= vkCreateImageView(screen
->dev
, &ivci
, NULL
, &sampler_view
->image_view
);
237 if (err
!= VK_SUCCESS
) {
242 return &sampler_view
->base
;
246 zink_destroy_sampler_view(struct pipe_context
*pctx
,
247 struct pipe_sampler_view
*pview
)
249 struct zink_sampler_view
*view
= zink_sampler_view(pview
);
250 vkDestroyImageView(zink_screen(pctx
->screen
)->dev
, view
->image_view
, NULL
);
255 zink_create_vs_state(struct pipe_context
*pctx
,
256 const struct pipe_shader_state
*shader
)
258 struct nir_shader
*nir
;
259 if (shader
->type
!= PIPE_SHADER_IR_NIR
)
260 nir
= zink_tgsi_to_nir(pctx
->screen
, shader
->tokens
);
262 nir
= (struct nir_shader
*)shader
->ir
.nir
;
264 return zink_compile_nir(zink_screen(pctx
->screen
), nir
);
268 bind_stage(struct zink_context
*ctx
, enum pipe_shader_type stage
,
269 struct zink_shader
*shader
)
271 assert(stage
< PIPE_SHADER_COMPUTE
);
272 ctx
->gfx_stages
[stage
] = shader
;
273 ctx
->dirty
|= ZINK_DIRTY_PROGRAM
;
277 zink_bind_vs_state(struct pipe_context
*pctx
,
280 bind_stage(zink_context(pctx
), PIPE_SHADER_VERTEX
, cso
);
284 zink_delete_vs_state(struct pipe_context
*pctx
,
287 zink_shader_free(zink_screen(pctx
->screen
), cso
);
291 zink_create_fs_state(struct pipe_context
*pctx
,
292 const struct pipe_shader_state
*shader
)
294 struct nir_shader
*nir
;
295 if (shader
->type
!= PIPE_SHADER_IR_NIR
)
296 nir
= zink_tgsi_to_nir(pctx
->screen
, shader
->tokens
);
298 nir
= (struct nir_shader
*)shader
->ir
.nir
;
300 return zink_compile_nir(zink_screen(pctx
->screen
), nir
);
304 zink_bind_fs_state(struct pipe_context
*pctx
,
307 bind_stage(zink_context(pctx
), PIPE_SHADER_FRAGMENT
, cso
);
311 zink_delete_fs_state(struct pipe_context
*pctx
,
314 zink_shader_free(zink_screen(pctx
->screen
), cso
);
/* pipe_context::set_polygon_stipple — not implemented. */
static void
zink_set_polygon_stipple(struct pipe_context *pctx,
                         const struct pipe_poly_stipple *ps)
{
}
324 zink_set_vertex_buffers(struct pipe_context
*pctx
,
326 unsigned num_buffers
,
327 const struct pipe_vertex_buffer
*buffers
)
329 struct zink_context
*ctx
= zink_context(pctx
);
332 for (int i
= 0; i
< num_buffers
; ++i
) {
333 const struct pipe_vertex_buffer
*vb
= buffers
+ i
;
334 ctx
->gfx_pipeline_state
.bindings
[start_slot
+ i
].stride
= vb
->stride
;
338 util_set_vertex_buffers_mask(ctx
->buffers
, &ctx
->buffers_enabled_mask
,
339 buffers
, start_slot
, num_buffers
);
343 zink_set_viewport_states(struct pipe_context
*pctx
,
345 unsigned num_viewports
,
346 const struct pipe_viewport_state
*state
)
348 struct zink_context
*ctx
= zink_context(pctx
);
350 for (unsigned i
= 0; i
< num_viewports
; ++i
) {
351 VkViewport viewport
= {
352 state
[i
].translate
[0] - state
[i
].scale
[0],
353 state
[i
].translate
[1] - state
[i
].scale
[1],
354 state
[i
].scale
[0] * 2,
355 state
[i
].scale
[1] * 2,
356 state
[i
].translate
[2] - state
[i
].scale
[2],
357 state
[i
].translate
[2] + state
[i
].scale
[2]
359 ctx
->viewports
[start_slot
+ i
] = viewport
;
361 ctx
->num_viewports
= start_slot
+ num_viewports
;
365 zink_set_scissor_states(struct pipe_context
*pctx
,
366 unsigned start_slot
, unsigned num_scissors
,
367 const struct pipe_scissor_state
*states
)
369 struct zink_context
*ctx
= zink_context(pctx
);
371 for (unsigned i
= 0; i
< num_scissors
; i
++) {
374 scissor
.offset
.x
= states
[i
].minx
;
375 scissor
.offset
.y
= states
[i
].miny
;
376 scissor
.extent
.width
= states
[i
].maxx
- states
[i
].minx
;
377 scissor
.extent
.height
= states
[i
].maxy
- states
[i
].miny
;
378 ctx
->scissors
[start_slot
+ i
] = scissor
;
380 ctx
->num_scissors
= start_slot
+ num_scissors
;
384 zink_set_constant_buffer(struct pipe_context
*pctx
,
385 enum pipe_shader_type shader
, uint index
,
386 const struct pipe_constant_buffer
*cb
)
388 struct zink_context
*ctx
= zink_context(pctx
);
391 struct pipe_resource
*buffer
= cb
->buffer
;
392 unsigned offset
= cb
->buffer_offset
;
394 u_upload_data(ctx
->base
.const_uploader
, 0, cb
->buffer_size
, 64,
395 cb
->user_buffer
, &offset
, &buffer
);
397 pipe_resource_reference(&ctx
->ubos
[shader
][index
].buffer
, buffer
);
398 ctx
->ubos
[shader
][index
].buffer_offset
= offset
;
399 ctx
->ubos
[shader
][index
].buffer_size
= cb
->buffer_size
;
400 ctx
->ubos
[shader
][index
].user_buffer
= NULL
;
403 pipe_resource_reference(&buffer
, NULL
);
405 pipe_resource_reference(&ctx
->ubos
[shader
][index
].buffer
, NULL
);
406 ctx
->ubos
[shader
][index
].buffer_offset
= 0;
407 ctx
->ubos
[shader
][index
].buffer_size
= 0;
408 ctx
->ubos
[shader
][index
].user_buffer
= NULL
;
413 zink_set_sampler_views(struct pipe_context
*pctx
,
414 enum pipe_shader_type shader_type
,
417 struct pipe_sampler_view
**views
)
419 struct zink_context
*ctx
= zink_context(pctx
);
421 for (unsigned i
= 0; i
< num_views
; ++i
) {
422 pipe_sampler_view_reference(
423 &ctx
->image_views
[shader_type
][start_slot
+ i
],
429 zink_set_stencil_ref(struct pipe_context
*pctx
,
430 const struct pipe_stencil_ref
*ref
)
432 struct zink_context
*ctx
= zink_context(pctx
);
433 ctx
->stencil_ref
[0] = ref
->ref_value
[0];
434 ctx
->stencil_ref
[1] = ref
->ref_value
[1];
/* pipe_context::set_clip_state — not implemented. */
static void
zink_set_clip_state(struct pipe_context *pctx,
                    const struct pipe_clip_state *pcs)
{
}
443 static struct zink_render_pass
*
444 get_render_pass(struct zink_context
*ctx
)
446 const struct pipe_framebuffer_state
*fb
= &ctx
->fb_state
;
447 struct zink_render_pass_state state
;
449 for (int i
= 0; i
< fb
->nr_cbufs
; i
++) {
450 struct zink_resource
*cbuf
= zink_resource(fb
->cbufs
[i
]->texture
);
451 state
.rts
[i
].format
= cbuf
->format
;
453 state
.num_cbufs
= fb
->nr_cbufs
;
456 struct zink_resource
*zsbuf
= zink_resource(fb
->zsbuf
->texture
);
457 state
.rts
[fb
->nr_cbufs
].format
= zsbuf
->format
;
459 state
.have_zsbuf
= fb
->zsbuf
!= NULL
;
461 // TODO: cache instead!
462 return zink_create_render_pass(zink_screen(ctx
->base
.screen
), &state
);
465 static struct zink_framebuffer
*
466 get_framebuffer(struct zink_context
*ctx
)
468 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
469 struct zink_render_pass
*rp
= get_render_pass(ctx
);
471 struct zink_framebuffer
*ret
= zink_create_framebuffer(screen
,
474 zink_render_pass_reference(screen
, &rp
, NULL
);
479 end_batch(struct zink_context
*ctx
, struct zink_batch
*batch
)
482 vkCmdEndRenderPass(batch
->cmdbuf
);
484 zink_end_cmdbuf(ctx
, batch
);
488 zink_begin_render_pass(struct zink_context
*ctx
, struct zink_batch
*batch
)
490 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
491 assert(batch
== zink_context_curr_batch(ctx
));
493 VkRenderPassBeginInfo rpbi
= {};
494 rpbi
.sType
= VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
;
495 rpbi
.renderPass
= ctx
->gfx_pipeline_state
.render_pass
->render_pass
;
496 rpbi
.renderArea
.offset
.x
= 0;
497 rpbi
.renderArea
.offset
.y
= 0;
498 rpbi
.renderArea
.extent
.width
= ctx
->fb_state
.width
;
499 rpbi
.renderArea
.extent
.height
= ctx
->fb_state
.height
;
500 rpbi
.clearValueCount
= 0;
501 rpbi
.pClearValues
= NULL
;
502 rpbi
.framebuffer
= ctx
->framebuffer
->fb
;
504 assert(ctx
->gfx_pipeline_state
.render_pass
&& ctx
->framebuffer
);
505 assert(!batch
->rp
|| batch
->rp
== ctx
->gfx_pipeline_state
.render_pass
);
506 assert(!batch
->fb
|| batch
->fb
== ctx
->framebuffer
);
508 zink_render_pass_reference(screen
, &batch
->rp
, ctx
->gfx_pipeline_state
.render_pass
);
509 zink_framebuffer_reference(screen
, &batch
->fb
, ctx
->framebuffer
);
511 vkCmdBeginRenderPass(batch
->cmdbuf
, &rpbi
, VK_SUBPASS_CONTENTS_INLINE
);
515 flush_batch(struct zink_context
*ctx
)
517 end_batch(ctx
, zink_context_curr_batch(ctx
));
520 if (ctx
->curr_batch
== ARRAY_SIZE(ctx
->batches
))
523 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
524 zink_start_cmdbuf(ctx
, batch
);
528 zink_set_framebuffer_state(struct pipe_context
*pctx
,
529 const struct pipe_framebuffer_state
*state
)
531 struct zink_context
*ctx
= zink_context(pctx
);
532 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
534 util_copy_framebuffer_state(&ctx
->fb_state
, state
);
536 struct zink_framebuffer
*fb
= get_framebuffer(ctx
);
537 zink_framebuffer_reference(screen
, &ctx
->framebuffer
, fb
);
538 zink_render_pass_reference(screen
, &ctx
->gfx_pipeline_state
.render_pass
, fb
->rp
);
539 zink_framebuffer_reference(screen
, &fb
, NULL
);
541 ctx
->gfx_pipeline_state
.num_attachments
= state
->nr_cbufs
;
544 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
546 for (int i
= 0; i
< state
->nr_cbufs
; i
++) {
547 struct zink_resource
*res
= zink_resource(state
->cbufs
[i
]->texture
);
548 if (res
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
549 res
->layout
!= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
)
550 zink_resource_barrier(batch
->cmdbuf
, res
, res
->aspect
,
551 VK_IMAGE_LAYOUT_GENERAL
);
555 struct zink_resource
*res
= zink_resource(state
->zsbuf
->texture
);
556 if (res
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
557 res
->layout
!= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
)
558 zink_resource_barrier(batch
->cmdbuf
, res
, res
->aspect
,
559 VK_IMAGE_LAYOUT_GENERAL
);
564 zink_set_active_query_state(struct pipe_context
*pctx
, bool enable
)
569 zink_set_blend_color(struct pipe_context
*pctx
,
570 const struct pipe_blend_color
*color
)
572 struct zink_context
*ctx
= zink_context(pctx
);
573 memcpy(ctx
->blend_constants
, color
->color
, sizeof(float) * 4);
577 access_flags(VkImageLayout layout
)
580 case VK_IMAGE_LAYOUT_UNDEFINED
:
581 case VK_IMAGE_LAYOUT_GENERAL
:
584 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
:
585 return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
;
586 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
:
587 return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
;
589 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
:
590 return VK_ACCESS_SHADER_READ_BIT
;
592 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
:
593 return VK_ACCESS_TRANSFER_READ_BIT
;
595 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
:
596 return VK_ACCESS_TRANSFER_WRITE_BIT
;
598 case VK_IMAGE_LAYOUT_PREINITIALIZED
:
599 return VK_ACCESS_HOST_WRITE_BIT
;
602 unreachable("unexpected layout");
607 zink_resource_barrier(VkCommandBuffer cmdbuf
, struct zink_resource
*res
,
608 VkImageAspectFlags aspect
, VkImageLayout new_layout
)
610 VkImageSubresourceRange isr
= {
612 0, VK_REMAINING_MIP_LEVELS
,
613 0, VK_REMAINING_ARRAY_LAYERS
616 VkImageMemoryBarrier imb
= {
617 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER
,
619 access_flags(res
->layout
),
620 access_flags(new_layout
),
623 VK_QUEUE_FAMILY_IGNORED
,
624 VK_QUEUE_FAMILY_IGNORED
,
628 vkCmdPipelineBarrier(
630 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
,
631 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
,
638 res
->layout
= new_layout
;
642 zink_clear(struct pipe_context
*pctx
,
644 const union pipe_color_union
*pcolor
,
645 double depth
, unsigned stencil
)
647 struct zink_context
*ctx
= zink_context(pctx
);
648 struct pipe_framebuffer_state
*fb
= &ctx
->fb_state
;
650 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
652 VkClearAttachment attachments
[1 + PIPE_MAX_COLOR_BUFS
];
653 int num_attachments
= 0;
655 if (buffers
& PIPE_CLEAR_COLOR
) {
656 VkClearColorValue color
;
657 color
.float32
[0] = pcolor
->f
[0];
658 color
.float32
[1] = pcolor
->f
[1];
659 color
.float32
[2] = pcolor
->f
[2];
660 color
.float32
[3] = pcolor
->f
[3];
662 for (unsigned i
= 0; i
< fb
->nr_cbufs
; i
++) {
663 if (!(buffers
& (PIPE_CLEAR_COLOR0
<< i
)) || !fb
->cbufs
[i
])
666 attachments
[num_attachments
].aspectMask
= VK_IMAGE_ASPECT_COLOR_BIT
;
667 attachments
[num_attachments
].colorAttachment
= i
;
668 attachments
[num_attachments
].clearValue
.color
= color
;
673 if (buffers
& PIPE_CLEAR_DEPTHSTENCIL
&& fb
->zsbuf
) {
674 VkImageAspectFlags aspect
= 0;
675 if (buffers
& PIPE_CLEAR_DEPTH
)
676 aspect
|= VK_IMAGE_ASPECT_DEPTH_BIT
;
677 if (buffers
& PIPE_CLEAR_STENCIL
)
678 aspect
|= VK_IMAGE_ASPECT_STENCIL_BIT
;
680 attachments
[num_attachments
].aspectMask
= aspect
;
681 attachments
[num_attachments
].clearValue
.depthStencil
.depth
= depth
;
682 attachments
[num_attachments
].clearValue
.depthStencil
.stencil
= stencil
;
686 unsigned num_layers
= util_framebuffer_get_num_layers(fb
);
687 VkClearRect rects
[PIPE_MAX_VIEWPORTS
];
689 if (ctx
->num_scissors
) {
690 for (unsigned i
= 0 ; i
< ctx
->num_scissors
; ++i
) {
691 rects
[i
].rect
= ctx
->scissors
[i
];
692 rects
[i
].baseArrayLayer
= 0;
693 rects
[i
].layerCount
= num_layers
;
695 num_rects
= ctx
->num_scissors
;
697 rects
[0].rect
.offset
.x
= 0;
698 rects
[0].rect
.offset
.y
= 0;
699 rects
[0].rect
.extent
.width
= fb
->width
;
700 rects
[0].rect
.extent
.height
= fb
->height
;
701 rects
[0].baseArrayLayer
= 0;
702 rects
[0].layerCount
= num_layers
;
707 zink_begin_render_pass(ctx
, batch
);
709 vkCmdClearAttachments(batch
->cmdbuf
,
710 num_attachments
, attachments
,
714 VkShaderStageFlagBits
715 zink_shader_stage(enum pipe_shader_type type
)
717 VkShaderStageFlagBits stages
[] = {
718 [PIPE_SHADER_VERTEX
] = VK_SHADER_STAGE_VERTEX_BIT
,
719 [PIPE_SHADER_FRAGMENT
] = VK_SHADER_STAGE_FRAGMENT_BIT
,
720 [PIPE_SHADER_GEOMETRY
] = VK_SHADER_STAGE_GEOMETRY_BIT
,
721 [PIPE_SHADER_TESS_CTRL
] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT
,
722 [PIPE_SHADER_TESS_EVAL
] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT
,
723 [PIPE_SHADER_COMPUTE
] = VK_SHADER_STAGE_COMPUTE_BIT
,
728 static VkDescriptorSet
729 allocate_descriptor_set(struct zink_context
*ctx
, VkDescriptorSetLayout dsl
)
731 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
732 VkDescriptorSetAllocateInfo dsai
;
733 memset((void *)&dsai
, 0, sizeof(dsai
));
734 dsai
.sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
;
736 dsai
.descriptorPool
= ctx
->descpool
;
737 dsai
.descriptorSetCount
= 1;
738 dsai
.pSetLayouts
= &dsl
;
740 VkDescriptorSet desc_set
;
741 if (vkAllocateDescriptorSets(screen
->dev
, &dsai
, &desc_set
) != VK_SUCCESS
) {
743 /* if we run out of descriptor sets we either need to create a bunch
744 * more... or flush and wait. For simplicity, let's flush for now.
746 struct pipe_fence_handle
*fence
= NULL
;
747 ctx
->base
.flush(&ctx
->base
, &fence
, 0);
748 ctx
->base
.screen
->fence_finish(ctx
->base
.screen
, &ctx
->base
, fence
,
749 PIPE_TIMEOUT_INFINITE
);
751 if (vkResetDescriptorPool(screen
->dev
, ctx
->descpool
, 0) != VK_SUCCESS
) {
752 fprintf(stderr
, "vkResetDescriptorPool failed\n");
753 return VK_NULL_HANDLE
;
755 if (vkAllocateDescriptorSets(screen
->dev
, &dsai
, &desc_set
) != VK_SUCCESS
) {
756 fprintf(stderr
, "vkAllocateDescriptorSets failed\n");
757 return VK_NULL_HANDLE
;
765 zink_bind_vertex_buffers(struct zink_batch
*batch
, struct zink_context
*ctx
)
767 VkBuffer buffers
[PIPE_MAX_ATTRIBS
];
768 VkDeviceSize buffer_offsets
[PIPE_MAX_ATTRIBS
];
769 const struct zink_vertex_elements_state
*elems
= ctx
->element_state
;
770 for (unsigned i
= 0; i
< elems
->hw_state
.num_bindings
; i
++) {
771 struct pipe_vertex_buffer
*vb
= ctx
->buffers
+ ctx
->element_state
->binding_map
[i
];
772 assert(vb
&& vb
->buffer
.resource
);
773 struct zink_resource
*res
= zink_resource(vb
->buffer
.resource
);
774 buffers
[i
] = res
->buffer
;
775 buffer_offsets
[i
] = vb
->buffer_offset
;
776 zink_batch_reference_resoure(batch
, res
);
779 if (elems
->hw_state
.num_bindings
> 0)
780 vkCmdBindVertexBuffers(batch
->cmdbuf
, 0,
781 elems
->hw_state
.num_bindings
,
782 buffers
, buffer_offsets
);
786 hash_gfx_program(const void *key
)
788 return _mesa_hash_data(key
, sizeof(struct zink_shader
*) * (PIPE_SHADER_TYPES
- 1));
792 equals_gfx_program(const void *a
, const void *b
)
794 return memcmp(a
, b
, sizeof(struct zink_shader
*) * (PIPE_SHADER_TYPES
- 1)) == 0;
797 static struct zink_gfx_program
*
798 get_gfx_program(struct zink_context
*ctx
)
800 if (ctx
->dirty
& ZINK_DIRTY_PROGRAM
) {
801 struct hash_entry
*entry
= _mesa_hash_table_search(ctx
->program_cache
,
804 struct zink_gfx_program
*prog
;
805 prog
= zink_create_gfx_program(zink_screen(ctx
->base
.screen
)->dev
,
807 entry
= _mesa_hash_table_insert(ctx
->program_cache
, prog
->stages
, prog
);
811 ctx
->curr_program
= entry
->data
;
812 ctx
->dirty
&= ~ZINK_DIRTY_PROGRAM
;
815 assert(ctx
->curr_program
);
816 return ctx
->curr_program
;
820 zink_draw_vbo(struct pipe_context
*pctx
,
821 const struct pipe_draw_info
*dinfo
)
823 struct zink_context
*ctx
= zink_context(pctx
);
824 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
825 struct zink_rasterizer_state
*rast_state
= ctx
->rast_state
;
827 if (dinfo
->mode
>= PIPE_PRIM_QUADS
||
828 dinfo
->mode
== PIPE_PRIM_LINE_LOOP
) {
829 if (!u_trim_pipe_prim(dinfo
->mode
, (unsigned *)&dinfo
->count
))
832 util_primconvert_save_rasterizer_state(ctx
->primconvert
, &rast_state
->base
);
833 util_primconvert_draw_vbo(ctx
->primconvert
, dinfo
);
837 struct zink_gfx_program
*gfx_program
= get_gfx_program(ctx
);
841 VkPipeline pipeline
= zink_get_gfx_pipeline(screen
->dev
, gfx_program
,
842 &ctx
->gfx_pipeline_state
,
845 bool depth_bias
= false;
846 switch (u_reduced_prim(dinfo
->mode
)) {
847 case PIPE_PRIM_POINTS
:
848 depth_bias
= rast_state
->offset_point
;
851 case PIPE_PRIM_LINES
:
852 depth_bias
= rast_state
->offset_line
;
855 case PIPE_PRIM_TRIANGLES
:
856 depth_bias
= rast_state
->offset_tri
;
860 unreachable("unexpected reduced prim");
863 unsigned index_offset
= 0;
864 struct pipe_resource
*index_buffer
= NULL
;
865 if (dinfo
->index_size
> 0) {
866 if (dinfo
->has_user_indices
) {
867 if (!util_upload_index_buffer(pctx
, dinfo
, &index_buffer
, &index_offset
)) {
868 debug_printf("util_upload_index_buffer() failed\n");
872 index_buffer
= dinfo
->index
.resource
;
875 VkDescriptorSet desc_set
= allocate_descriptor_set(ctx
, gfx_program
->dsl
);
877 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
879 VkWriteDescriptorSet wds
[PIPE_SHADER_TYPES
* PIPE_MAX_CONSTANT_BUFFERS
+ PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
880 VkDescriptorBufferInfo buffer_infos
[PIPE_SHADER_TYPES
* PIPE_MAX_CONSTANT_BUFFERS
];
881 VkDescriptorImageInfo image_infos
[PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
882 int num_wds
= 0, num_buffer_info
= 0, num_image_info
= 0;
884 struct zink_resource
*transitions
[PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
885 int num_transitions
= 0;
887 for (int i
= 0; i
< ARRAY_SIZE(ctx
->gfx_stages
); i
++) {
888 struct zink_shader
*shader
= ctx
->gfx_stages
[i
];
892 for (int j
= 0; j
< shader
->num_bindings
; j
++) {
893 int index
= shader
->bindings
[j
].index
;
894 if (shader
->bindings
[j
].type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
) {
895 assert(ctx
->ubos
[i
][index
].buffer_size
> 0);
896 assert(ctx
->ubos
[i
][index
].buffer
);
897 struct zink_resource
*res
= zink_resource(ctx
->ubos
[i
][index
].buffer
);
898 buffer_infos
[num_buffer_info
].buffer
= res
->buffer
;
899 buffer_infos
[num_buffer_info
].offset
= ctx
->ubos
[i
][index
].buffer_offset
;
900 buffer_infos
[num_buffer_info
].range
= VK_WHOLE_SIZE
;
901 wds
[num_wds
].pBufferInfo
= buffer_infos
+ num_buffer_info
;
903 zink_batch_reference_resoure(batch
, res
);
905 struct pipe_sampler_view
*psampler_view
= ctx
->image_views
[i
][index
];
906 assert(psampler_view
);
907 struct zink_sampler_view
*sampler_view
= (struct zink_sampler_view
*)psampler_view
;
908 struct zink_resource
*res
= zink_resource(psampler_view
->texture
);
909 VkImageLayout layout
= res
->layout
;
910 if (layout
!= VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
&&
911 layout
!= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
&&
912 layout
!= VK_IMAGE_LAYOUT_GENERAL
) {
913 transitions
[num_transitions
++] = res
;
914 layout
= VK_IMAGE_LAYOUT_GENERAL
;
916 image_infos
[num_image_info
].imageLayout
= layout
;
917 image_infos
[num_image_info
].imageView
= sampler_view
->image_view
;
918 image_infos
[num_image_info
].sampler
= ctx
->samplers
[i
][index
];
919 wds
[num_wds
].pImageInfo
= image_infos
+ num_image_info
;
921 zink_batch_reference_resoure(batch
, res
);
924 wds
[num_wds
].sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
;
925 wds
[num_wds
].pNext
= NULL
;
926 wds
[num_wds
].dstBinding
= shader
->bindings
[j
].binding
;
927 wds
[num_wds
].dstArrayElement
= 0;
928 wds
[num_wds
].descriptorCount
= 1;
929 wds
[num_wds
].descriptorType
= shader
->bindings
[j
].type
;
934 if (num_transitions
> 0) {
936 vkCmdEndRenderPass(batch
->cmdbuf
);
938 for (int i
= 0; i
< num_transitions
; ++i
)
939 zink_resource_barrier(batch
->cmdbuf
, transitions
[i
],
940 transitions
[i
]->aspect
,
941 VK_IMAGE_LAYOUT_GENERAL
);
943 zink_begin_render_pass(ctx
, batch
);
944 } else if (!batch
->rp
)
945 zink_begin_render_pass(ctx
, batch
);
948 vkCmdSetViewport(batch
->cmdbuf
, 0, ctx
->num_viewports
, ctx
->viewports
);
950 if (ctx
->num_scissors
)
951 vkCmdSetScissor(batch
->cmdbuf
, 0, ctx
->num_scissors
, ctx
->scissors
);
952 else if (ctx
->fb_state
.width
&& ctx
->fb_state
.height
) {
953 VkRect2D fb_scissor
= {};
954 fb_scissor
.extent
.width
= ctx
->fb_state
.width
;
955 fb_scissor
.extent
.height
= ctx
->fb_state
.height
;
956 vkCmdSetScissor(batch
->cmdbuf
, 0, 1, &fb_scissor
);
959 vkCmdSetStencilReference(batch
->cmdbuf
, VK_STENCIL_FACE_FRONT_BIT
, ctx
->stencil_ref
[0]);
960 vkCmdSetStencilReference(batch
->cmdbuf
, VK_STENCIL_FACE_BACK_BIT
, ctx
->stencil_ref
[1]);
963 vkCmdSetDepthBias(batch
->cmdbuf
, rast_state
->offset_units
, rast_state
->offset_clamp
, rast_state
->offset_scale
);
965 vkCmdSetDepthBias(batch
->cmdbuf
, 0.0f
, 0.0f
, 0.0f
);
967 if (ctx
->gfx_pipeline_state
.blend_state
->need_blend_constants
)
968 vkCmdSetBlendConstants(batch
->cmdbuf
, ctx
->blend_constants
);
970 for (int i
= 0; i
< num_wds
; ++i
)
971 wds
[i
].dstSet
= desc_set
;
973 vkUpdateDescriptorSets(screen
->dev
, num_wds
, wds
, 0, NULL
);
975 vkCmdBindPipeline(batch
->cmdbuf
, VK_PIPELINE_BIND_POINT_GRAPHICS
, pipeline
);
976 vkCmdBindDescriptorSets(batch
->cmdbuf
, VK_PIPELINE_BIND_POINT_GRAPHICS
,
977 gfx_program
->layout
, 0, 1, &desc_set
, 0, NULL
);
978 zink_bind_vertex_buffers(batch
, ctx
);
980 if (dinfo
->index_size
> 0) {
981 assert(dinfo
->index_size
!= 1);
982 VkIndexType index_type
= dinfo
->index_size
== 2 ? VK_INDEX_TYPE_UINT16
: VK_INDEX_TYPE_UINT32
;
983 struct zink_resource
*res
= zink_resource(index_buffer
);
984 vkCmdBindIndexBuffer(batch
->cmdbuf
, res
->buffer
, index_offset
, index_type
);
985 zink_batch_reference_resoure(batch
, res
);
986 vkCmdDrawIndexed(batch
->cmdbuf
,
987 dinfo
->count
, dinfo
->instance_count
,
988 dinfo
->start
, dinfo
->index_bias
, dinfo
->start_instance
);
990 vkCmdDraw(batch
->cmdbuf
, dinfo
->count
, dinfo
->instance_count
, dinfo
->start
, dinfo
->start_instance
);
992 if (dinfo
->index_size
> 0 && dinfo
->has_user_indices
)
993 pipe_resource_reference(&index_buffer
, NULL
);
997 zink_flush(struct pipe_context
*pctx
,
998 struct pipe_fence_handle
**pfence
,
999 enum pipe_flush_flags flags
)
1001 struct zink_context
*ctx
= zink_context(pctx
);
1003 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
1007 zink_fence_reference(zink_screen(pctx
->screen
),
1008 (struct zink_fence
**)pfence
,
1011 if (flags
& PIPE_FLUSH_END_OF_FRAME
)
1012 pctx
->screen
->fence_finish(pctx
->screen
, pctx
,
1013 (struct pipe_fence_handle
*)batch
->fence
,
1014 PIPE_TIMEOUT_INFINITE
);
1018 zink_blit(struct pipe_context
*pctx
,
1019 const struct pipe_blit_info
*info
)
1021 struct zink_context
*ctx
= zink_context(pctx
);
1022 bool is_resolve
= false;
1023 if (info
->mask
!= PIPE_MASK_RGBA
||
1024 info
->scissor_enable
||
1025 info
->alpha_blend
) {
1026 if (!util_blitter_is_blit_supported(ctx
->blitter
, info
)) {
1027 debug_printf("blit unsupported %s -> %s\n",
1028 util_format_short_name(info
->src
.resource
->format
),
1029 util_format_short_name(info
->dst
.resource
->format
));
1033 util_blitter_save_fragment_constant_buffer_slot(ctx
->blitter
, ctx
->ubos
[PIPE_SHADER_FRAGMENT
]);
1034 util_blitter_save_vertex_buffer_slot(ctx
->blitter
, ctx
->buffers
);
1035 util_blitter_save_vertex_shader(ctx
->blitter
, ctx
->gfx_stages
[PIPE_SHADER_VERTEX
]);
1036 util_blitter_save_fragment_shader(ctx
->blitter
, ctx
->gfx_stages
[PIPE_SHADER_FRAGMENT
]);
1037 util_blitter_save_rasterizer(ctx
->blitter
, ctx
->gfx_pipeline_state
.rast_state
);
1039 util_blitter_blit(ctx
->blitter
, info
);
1043 struct zink_resource
*src
= zink_resource(info
->src
.resource
);
1044 struct zink_resource
*dst
= zink_resource(info
->dst
.resource
);
1046 if (src
->base
.nr_samples
> 1 && dst
->base
.nr_samples
<= 1)
1049 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
1051 vkCmdEndRenderPass(batch
->cmdbuf
);
1053 zink_batch_reference_resoure(batch
, src
);
1054 zink_batch_reference_resoure(batch
, dst
);
1057 VkImageResolve region
= {};
1059 region
.srcSubresource
.aspectMask
= src
->aspect
;
1060 region
.srcSubresource
.mipLevel
= info
->src
.level
;
1061 region
.srcSubresource
.baseArrayLayer
= 0; // no clue
1062 region
.srcSubresource
.layerCount
= 1; // no clue
1063 region
.srcOffset
.x
= info
->src
.box
.x
;
1064 region
.srcOffset
.y
= info
->src
.box
.y
;
1065 region
.srcOffset
.z
= info
->src
.box
.z
;
1067 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1068 region
.dstSubresource
.mipLevel
= info
->dst
.level
;
1069 region
.dstSubresource
.baseArrayLayer
= 0; // no clue
1070 region
.dstSubresource
.layerCount
= 1; // no clue
1071 region
.dstOffset
.x
= info
->dst
.box
.x
;
1072 region
.dstOffset
.y
= info
->dst
.box
.y
;
1073 region
.dstOffset
.z
= info
->dst
.box
.z
;
1075 region
.extent
.width
= info
->dst
.box
.width
;
1076 region
.extent
.height
= info
->dst
.box
.height
;
1077 region
.extent
.depth
= info
->dst
.box
.depth
;
1078 vkCmdResolveImage(batch
->cmdbuf
, src
->image
, src
->layout
,
1079 dst
->image
, dst
->layout
,
1083 if (dst
->layout
!= VK_IMAGE_LAYOUT_GENERAL
&&
1084 dst
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
)
1085 zink_resource_barrier(batch
->cmdbuf
, dst
, dst
->aspect
,
1086 VK_IMAGE_LAYOUT_GENERAL
);
1088 VkImageBlit region
= {};
1089 region
.srcSubresource
.aspectMask
= src
->aspect
;
1090 region
.srcSubresource
.mipLevel
= info
->src
.level
;
1091 region
.srcOffsets
[0].x
= info
->src
.box
.x
;
1092 region
.srcOffsets
[0].y
= info
->src
.box
.y
;
1093 region
.srcOffsets
[1].x
= info
->src
.box
.x
+ info
->src
.box
.width
;
1094 region
.srcOffsets
[1].y
= info
->src
.box
.y
+ info
->src
.box
.height
;
1096 if (src
->base
.array_size
> 1) {
1097 region
.srcOffsets
[0].z
= 0;
1098 region
.srcOffsets
[1].z
= 1;
1099 region
.srcSubresource
.baseArrayLayer
= info
->src
.box
.z
;
1100 region
.srcSubresource
.layerCount
= info
->src
.box
.depth
;
1102 region
.srcOffsets
[0].z
= info
->src
.box
.z
;
1103 region
.srcOffsets
[1].z
= info
->src
.box
.z
+ info
->src
.box
.depth
;
1104 region
.srcSubresource
.baseArrayLayer
= 0;
1105 region
.srcSubresource
.layerCount
= 1;
1108 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1109 region
.dstSubresource
.mipLevel
= info
->dst
.level
;
1110 region
.dstOffsets
[0].x
= info
->dst
.box
.x
;
1111 region
.dstOffsets
[0].y
= info
->dst
.box
.y
;
1112 region
.dstOffsets
[1].x
= info
->dst
.box
.x
+ info
->dst
.box
.width
;
1113 region
.dstOffsets
[1].y
= info
->dst
.box
.y
+ info
->dst
.box
.height
;
1115 if (dst
->base
.array_size
> 1) {
1116 region
.dstOffsets
[0].z
= 0;
1117 region
.dstOffsets
[1].z
= 1;
1118 region
.dstSubresource
.baseArrayLayer
= info
->dst
.box
.z
;
1119 region
.dstSubresource
.layerCount
= info
->dst
.box
.depth
;
1121 region
.dstOffsets
[0].z
= info
->dst
.box
.z
;
1122 region
.dstOffsets
[1].z
= info
->dst
.box
.z
+ info
->dst
.box
.depth
;
1123 region
.dstSubresource
.baseArrayLayer
= 0;
1124 region
.dstSubresource
.layerCount
= 1;
1127 vkCmdBlitImage(batch
->cmdbuf
, src
->image
, src
->layout
,
1128 dst
->image
, dst
->layout
,
1130 filter(info
->filter
));
1134 zink_begin_render_pass(ctx
, batch
);
1136 /* HACK: I have no idea why this is needed, but without it ioquake3
1137 * randomly keeps fading to black.
static void
zink_flush_resource(struct pipe_context *pipe,
                    struct pipe_resource *resource)
{
   /* No-op: zink has nothing to do on a per-resource flush.
    * NOTE(review): the body lines were lost by the extraction; reconstructed
    * as the empty stub the surrounding code implies — confirm upstream.
    */
}
1149 zink_resource_copy_region(struct pipe_context
*pctx
,
1150 struct pipe_resource
*pdst
,
1151 unsigned dst_level
, unsigned dstx
, unsigned dsty
, unsigned dstz
,
1152 struct pipe_resource
*psrc
,
1153 unsigned src_level
, const struct pipe_box
*src_box
)
1155 struct zink_resource
*dst
= zink_resource(pdst
);
1156 struct zink_resource
*src
= zink_resource(psrc
);
1157 struct zink_context
*ctx
= zink_context(pctx
);
1158 if (dst
->base
.target
!= PIPE_BUFFER
&& src
->base
.target
!= PIPE_BUFFER
) {
1159 VkImageCopy region
= {};
1161 region
.srcSubresource
.aspectMask
= src
->aspect
;
1162 region
.srcSubresource
.mipLevel
= src_level
;
1163 region
.srcSubresource
.layerCount
= 1;
1164 if (src
->base
.array_size
> 1) {
1165 region
.srcSubresource
.baseArrayLayer
= src_box
->z
;
1166 region
.srcSubresource
.layerCount
= src_box
->depth
;
1167 region
.extent
.depth
= 1;
1169 region
.srcOffset
.z
= src_box
->z
;
1170 region
.srcSubresource
.layerCount
= 1;
1171 region
.extent
.depth
= src_box
->depth
;
1174 region
.srcOffset
.x
= src_box
->x
;
1175 region
.srcOffset
.y
= src_box
->y
;
1177 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1178 region
.dstSubresource
.mipLevel
= dst_level
;
1179 if (dst
->base
.array_size
> 1) {
1180 region
.dstSubresource
.baseArrayLayer
= dstz
;
1181 region
.dstSubresource
.layerCount
= src_box
->depth
;
1183 region
.dstOffset
.z
= dstz
;
1184 region
.dstSubresource
.layerCount
= 1;
1187 region
.dstOffset
.x
= dstx
;
1188 region
.dstOffset
.y
= dsty
;
1189 region
.extent
.width
= src_box
->width
;
1190 region
.extent
.height
= src_box
->height
;
1192 struct zink_batch
*batch
= zink_context_curr_batch(ctx
);
1193 zink_batch_reference_resoure(batch
, src
);
1194 zink_batch_reference_resoure(batch
, dst
);
1196 vkCmdCopyImage(batch
->cmdbuf
, src
->image
, src
->layout
,
1197 dst
->image
, dst
->layout
,
1200 debug_printf("zink: TODO resource copy\n");
1203 struct pipe_context
*
1204 zink_context_create(struct pipe_screen
*pscreen
, void *priv
, unsigned flags
)
1206 struct zink_screen
*screen
= zink_screen(pscreen
);
1207 struct zink_context
*ctx
= CALLOC_STRUCT(zink_context
);
1209 ctx
->base
.screen
= pscreen
;
1210 ctx
->base
.priv
= priv
;
1212 ctx
->base
.destroy
= zink_context_destroy
;
1214 zink_context_state_init(&ctx
->base
);
1216 ctx
->base
.create_sampler_state
= zink_create_sampler_state
;
1217 ctx
->base
.bind_sampler_states
= zink_bind_sampler_states
;
1218 ctx
->base
.delete_sampler_state
= zink_delete_sampler_state
;
1220 ctx
->base
.create_sampler_view
= zink_create_sampler_view
;
1221 ctx
->base
.set_sampler_views
= zink_set_sampler_views
;
1222 ctx
->base
.sampler_view_destroy
= zink_destroy_sampler_view
;
1224 ctx
->base
.create_vs_state
= zink_create_vs_state
;
1225 ctx
->base
.bind_vs_state
= zink_bind_vs_state
;
1226 ctx
->base
.delete_vs_state
= zink_delete_vs_state
;
1228 ctx
->base
.create_fs_state
= zink_create_fs_state
;
1229 ctx
->base
.bind_fs_state
= zink_bind_fs_state
;
1230 ctx
->base
.delete_fs_state
= zink_delete_fs_state
;
1232 ctx
->base
.set_polygon_stipple
= zink_set_polygon_stipple
;
1233 ctx
->base
.set_vertex_buffers
= zink_set_vertex_buffers
;
1234 ctx
->base
.set_viewport_states
= zink_set_viewport_states
;
1235 ctx
->base
.set_scissor_states
= zink_set_scissor_states
;
1236 ctx
->base
.set_constant_buffer
= zink_set_constant_buffer
;
1237 ctx
->base
.set_framebuffer_state
= zink_set_framebuffer_state
;
1238 ctx
->base
.set_stencil_ref
= zink_set_stencil_ref
;
1239 ctx
->base
.set_clip_state
= zink_set_clip_state
;
1240 ctx
->base
.set_active_query_state
= zink_set_active_query_state
;
1241 ctx
->base
.set_blend_color
= zink_set_blend_color
;
1243 ctx
->base
.clear
= zink_clear
;
1244 ctx
->base
.draw_vbo
= zink_draw_vbo
;
1245 ctx
->base
.flush
= zink_flush
;
1247 ctx
->base
.resource_copy_region
= zink_resource_copy_region
;
1248 ctx
->base
.blit
= zink_blit
;
1250 ctx
->base
.flush_resource
= zink_flush_resource
;
1251 zink_context_surface_init(&ctx
->base
);
1252 zink_context_resource_init(&ctx
->base
);
1253 zink_context_query_init(&ctx
->base
);
1255 slab_create_child(&ctx
->transfer_pool
, &screen
->transfer_pool
);
1257 ctx
->base
.stream_uploader
= u_upload_create_default(&ctx
->base
);
1258 ctx
->base
.const_uploader
= ctx
->base
.stream_uploader
;
1260 int prim_hwsupport
= 1 << PIPE_PRIM_POINTS
|
1261 1 << PIPE_PRIM_LINES
|
1262 1 << PIPE_PRIM_LINE_STRIP
|
1263 1 << PIPE_PRIM_TRIANGLES
|
1264 1 << PIPE_PRIM_TRIANGLE_STRIP
|
1265 1 << PIPE_PRIM_TRIANGLE_FAN
;
1267 ctx
->primconvert
= util_primconvert_create(&ctx
->base
, prim_hwsupport
);
1268 if (!ctx
->primconvert
)
1271 ctx
->blitter
= util_blitter_create(&ctx
->base
);
1275 VkCommandPoolCreateInfo cpci
= {};
1276 cpci
.sType
= VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO
;
1277 cpci
.queueFamilyIndex
= screen
->gfx_queue
;
1278 cpci
.flags
= VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
;
1279 if (vkCreateCommandPool(screen
->dev
, &cpci
, NULL
, &ctx
->cmdpool
) != VK_SUCCESS
)
1282 VkCommandBufferAllocateInfo cbai
= {};
1283 cbai
.sType
= VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
;
1284 cbai
.commandPool
= ctx
->cmdpool
;
1285 cbai
.level
= VK_COMMAND_BUFFER_LEVEL_PRIMARY
;
1286 cbai
.commandBufferCount
= 1;
1287 for (int i
= 0; i
< ARRAY_SIZE(ctx
->batches
); ++i
) {
1288 if (vkAllocateCommandBuffers(screen
->dev
, &cbai
, &ctx
->batches
[i
].cmdbuf
) != VK_SUCCESS
)
1291 ctx
->batches
[i
].resources
= _mesa_set_create(NULL
, _mesa_hash_pointer
,
1292 _mesa_key_pointer_equal
);
1293 if (!ctx
->batches
[i
].resources
)
1296 util_dynarray_init(&ctx
->batches
[i
].zombie_samplers
, NULL
);
1299 VkDescriptorPoolSize sizes
[] = {
1300 {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
, 1000}
1302 VkDescriptorPoolCreateInfo dpci
= {};
1303 dpci
.sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
;
1304 dpci
.pPoolSizes
= sizes
;
1305 dpci
.poolSizeCount
= ARRAY_SIZE(sizes
);
1306 dpci
.flags
= VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
;
1307 dpci
.maxSets
= 1000;
1309 if(vkCreateDescriptorPool(screen
->dev
, &dpci
, 0, &ctx
->descpool
) != VK_SUCCESS
)
1312 vkGetDeviceQueue(screen
->dev
, screen
->gfx_queue
, 0, &ctx
->queue
);
1314 ctx
->program_cache
= _mesa_hash_table_create(NULL
, hash_gfx_program
, equals_gfx_program
);
1315 if (!ctx
->program_cache
)
1318 ctx
->dirty
= ZINK_DIRTY_PROGRAM
;
1320 /* start the first batch */
1321 zink_start_cmdbuf(ctx
, zink_context_curr_batch(ctx
));
1327 vkDestroyCommandPool(screen
->dev
, ctx
->cmdpool
, NULL
);