2 * Copyright 2018 Collabora Ltd.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
24 #include "zink_context.h"
26 #include "zink_batch.h"
27 #include "zink_compiler.h"
28 #include "zink_fence.h"
29 #include "zink_framebuffer.h"
30 #include "zink_pipeline.h"
31 #include "zink_program.h"
32 #include "zink_render_pass.h"
33 #include "zink_resource.h"
34 #include "zink_screen.h"
35 #include "zink_state.h"
36 #include "zink_surface.h"
38 #include "indices/u_primconvert.h"
39 #include "util/u_blitter.h"
40 #include "util/u_debug.h"
41 #include "util/u_format.h"
42 #include "util/u_framebuffer.h"
43 #include "util/u_helpers.h"
44 #include "util/u_inlines.h"
48 #include "util/u_memory.h"
49 #include "util/u_prim.h"
50 #include "util/u_upload_mgr.h"
53 zink_context_destroy(struct pipe_context
*pctx
)
55 struct zink_context
*ctx
= zink_context(pctx
);
56 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
58 if (vkQueueWaitIdle(ctx
->queue
) != VK_SUCCESS
)
59 debug_printf("vkQueueWaitIdle failed\n");
61 for (int i
= 0; i
< ARRAY_SIZE(ctx
->batches
); ++i
)
62 vkFreeCommandBuffers(screen
->dev
, ctx
->cmdpool
, 1, &ctx
->batches
[i
].cmdbuf
);
63 vkDestroyCommandPool(screen
->dev
, ctx
->cmdpool
, NULL
);
65 util_primconvert_destroy(ctx
->primconvert
);
66 u_upload_destroy(pctx
->stream_uploader
);
67 slab_destroy_child(&ctx
->transfer_pool
);
68 util_blitter_destroy(ctx
->blitter
);
73 filter(enum pipe_tex_filter filter
)
76 case PIPE_TEX_FILTER_NEAREST
: return VK_FILTER_NEAREST
;
77 case PIPE_TEX_FILTER_LINEAR
: return VK_FILTER_LINEAR
;
79 unreachable("unexpected filter");
82 static VkSamplerMipmapMode
83 sampler_mipmap_mode(enum pipe_tex_mipfilter filter
)
86 case PIPE_TEX_MIPFILTER_NEAREST
: return VK_SAMPLER_MIPMAP_MODE_NEAREST
;
87 case PIPE_TEX_MIPFILTER_LINEAR
: return VK_SAMPLER_MIPMAP_MODE_LINEAR
;
88 case PIPE_TEX_MIPFILTER_NONE
:
89 unreachable("PIPE_TEX_MIPFILTER_NONE should be dealt with earlier");
91 unreachable("unexpected filter");
94 static VkSamplerAddressMode
95 sampler_address_mode(enum pipe_tex_wrap filter
)
98 case PIPE_TEX_WRAP_REPEAT
: return VK_SAMPLER_ADDRESS_MODE_REPEAT
;
99 case PIPE_TEX_WRAP_CLAMP
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
100 case PIPE_TEX_WRAP_CLAMP_TO_EDGE
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
;
101 case PIPE_TEX_WRAP_CLAMP_TO_BORDER
: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
;
102 case PIPE_TEX_WRAP_MIRROR_REPEAT
: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT
;
103 case PIPE_TEX_WRAP_MIRROR_CLAMP
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
104 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
;
105 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER
: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
; /* not technically correct, but kinda works */
107 unreachable("unexpected wrap");
111 compare_op(enum pipe_compare_func op
)
114 case PIPE_FUNC_NEVER
: return VK_COMPARE_OP_NEVER
;
115 case PIPE_FUNC_LESS
: return VK_COMPARE_OP_LESS
;
116 case PIPE_FUNC_EQUAL
: return VK_COMPARE_OP_EQUAL
;
117 case PIPE_FUNC_LEQUAL
: return VK_COMPARE_OP_LESS_OR_EQUAL
;
118 case PIPE_FUNC_GREATER
: return VK_COMPARE_OP_GREATER
;
119 case PIPE_FUNC_NOTEQUAL
: return VK_COMPARE_OP_NOT_EQUAL
;
120 case PIPE_FUNC_GEQUAL
: return VK_COMPARE_OP_GREATER_OR_EQUAL
;
121 case PIPE_FUNC_ALWAYS
: return VK_COMPARE_OP_ALWAYS
;
123 unreachable("unexpected compare");
127 zink_create_sampler_state(struct pipe_context
*pctx
,
128 const struct pipe_sampler_state
*state
)
130 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
132 VkSamplerCreateInfo sci
= {};
133 sci
.sType
= VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
;
134 sci
.magFilter
= filter(state
->mag_img_filter
);
135 sci
.minFilter
= filter(state
->min_img_filter
);
137 if (state
->min_mip_filter
!= PIPE_TEX_MIPFILTER_NONE
) {
138 sci
.mipmapMode
= sampler_mipmap_mode(state
->min_mip_filter
);
139 sci
.minLod
= state
->min_lod
;
140 sci
.maxLod
= state
->max_lod
;
142 sci
.mipmapMode
= VK_SAMPLER_MIPMAP_MODE_NEAREST
;
147 sci
.addressModeU
= sampler_address_mode(state
->wrap_s
);
148 sci
.addressModeV
= sampler_address_mode(state
->wrap_t
);
149 sci
.addressModeW
= sampler_address_mode(state
->wrap_r
);
150 sci
.mipLodBias
= state
->lod_bias
;
152 if (state
->compare_mode
== PIPE_TEX_COMPARE_NONE
)
153 sci
.compareOp
= VK_COMPARE_OP_NEVER
;
155 sci
.compareOp
= compare_op(state
->compare_func
);
157 sci
.borderColor
= VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK
; // TODO
158 sci
.unnormalizedCoordinates
= !state
->normalized_coords
;
160 if (state
->max_anisotropy
> 1) {
161 sci
.maxAnisotropy
= state
->max_anisotropy
;
162 sci
.anisotropyEnable
= VK_TRUE
;
165 VkSampler
*sampler
= CALLOC(1, sizeof(VkSampler
));
169 if (vkCreateSampler(screen
->dev
, &sci
, NULL
, sampler
) != VK_SUCCESS
) {
178 zink_bind_sampler_states(struct pipe_context
*pctx
,
179 enum pipe_shader_type shader
,
181 unsigned num_samplers
,
184 struct zink_context
*ctx
= zink_context(pctx
);
185 for (unsigned i
= 0; i
< num_samplers
; ++i
) {
186 VkSampler
*sampler
= samplers
[i
];
187 ctx
->samplers
[shader
][start_slot
+ i
] = sampler
? *sampler
: VK_NULL_HANDLE
;
189 ctx
->num_samplers
[shader
] = start_slot
+ num_samplers
;
193 zink_delete_sampler_state(struct pipe_context
*pctx
,
196 struct zink_batch
*batch
= zink_curr_batch(zink_context(pctx
));
197 util_dynarray_append(&batch
->zombie_samplers
, VkSampler
,
198 *(VkSampler
*)sampler_state
);
203 static VkImageViewType
204 image_view_type(enum pipe_texture_target target
)
207 case PIPE_TEXTURE_1D
: return VK_IMAGE_VIEW_TYPE_1D
;
208 case PIPE_TEXTURE_1D_ARRAY
: return VK_IMAGE_VIEW_TYPE_1D_ARRAY
;
209 case PIPE_TEXTURE_2D
: return VK_IMAGE_VIEW_TYPE_2D
;
210 case PIPE_TEXTURE_2D_ARRAY
: return VK_IMAGE_VIEW_TYPE_2D_ARRAY
;
211 case PIPE_TEXTURE_CUBE
: return VK_IMAGE_VIEW_TYPE_CUBE
;
212 case PIPE_TEXTURE_CUBE_ARRAY
: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
;
213 case PIPE_TEXTURE_3D
: return VK_IMAGE_VIEW_TYPE_3D
;
214 case PIPE_TEXTURE_RECT
: return VK_IMAGE_VIEW_TYPE_2D
; /* not sure */
216 unreachable("unexpected target");
220 static VkComponentSwizzle
221 component_mapping(enum pipe_swizzle swizzle
)
224 case PIPE_SWIZZLE_X
: return VK_COMPONENT_SWIZZLE_R
;
225 case PIPE_SWIZZLE_Y
: return VK_COMPONENT_SWIZZLE_G
;
226 case PIPE_SWIZZLE_Z
: return VK_COMPONENT_SWIZZLE_B
;
227 case PIPE_SWIZZLE_W
: return VK_COMPONENT_SWIZZLE_A
;
228 case PIPE_SWIZZLE_0
: return VK_COMPONENT_SWIZZLE_ZERO
;
229 case PIPE_SWIZZLE_1
: return VK_COMPONENT_SWIZZLE_ONE
;
230 case PIPE_SWIZZLE_NONE
: return VK_COMPONENT_SWIZZLE_IDENTITY
; // ???
232 unreachable("unexpected swizzle");
236 static VkImageAspectFlags
237 sampler_aspect_from_format(enum pipe_format fmt
)
239 if (util_format_is_depth_or_stencil(fmt
)) {
240 const struct util_format_description
*desc
= util_format_description(fmt
);
241 if (util_format_has_depth(desc
))
242 return VK_IMAGE_ASPECT_DEPTH_BIT
;
243 assert(util_format_has_stencil(desc
));
244 return VK_IMAGE_ASPECT_STENCIL_BIT
;
246 return VK_IMAGE_ASPECT_COLOR_BIT
;
249 static struct pipe_sampler_view
*
250 zink_create_sampler_view(struct pipe_context
*pctx
, struct pipe_resource
*pres
,
251 const struct pipe_sampler_view
*state
)
253 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
254 struct zink_resource
*res
= zink_resource(pres
);
255 struct zink_sampler_view
*sampler_view
= CALLOC_STRUCT(zink_sampler_view
);
257 sampler_view
->base
= *state
;
258 sampler_view
->base
.texture
= NULL
;
259 pipe_resource_reference(&sampler_view
->base
.texture
, pres
);
260 sampler_view
->base
.reference
.count
= 1;
261 sampler_view
->base
.context
= pctx
;
263 VkImageViewCreateInfo ivci
= {};
264 ivci
.sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
;
265 ivci
.image
= res
->image
;
266 ivci
.viewType
= image_view_type(state
->target
);
267 ivci
.format
= zink_get_format(screen
, state
->format
);
268 ivci
.components
.r
= component_mapping(state
->swizzle_r
);
269 ivci
.components
.g
= component_mapping(state
->swizzle_g
);
270 ivci
.components
.b
= component_mapping(state
->swizzle_b
);
271 ivci
.components
.a
= component_mapping(state
->swizzle_a
);
273 ivci
.subresourceRange
.aspectMask
= sampler_aspect_from_format(state
->format
);
274 ivci
.subresourceRange
.baseMipLevel
= state
->u
.tex
.first_level
;
275 ivci
.subresourceRange
.baseArrayLayer
= state
->u
.tex
.first_layer
;
276 ivci
.subresourceRange
.levelCount
= state
->u
.tex
.last_level
- state
->u
.tex
.first_level
+ 1;
277 ivci
.subresourceRange
.layerCount
= state
->u
.tex
.last_layer
- state
->u
.tex
.first_layer
+ 1;
279 VkResult err
= vkCreateImageView(screen
->dev
, &ivci
, NULL
, &sampler_view
->image_view
);
280 if (err
!= VK_SUCCESS
) {
285 return &sampler_view
->base
;
289 zink_sampler_view_destroy(struct pipe_context
*pctx
,
290 struct pipe_sampler_view
*pview
)
292 struct zink_sampler_view
*view
= zink_sampler_view(pview
);
293 vkDestroyImageView(zink_screen(pctx
->screen
)->dev
, view
->image_view
, NULL
);
298 zink_create_vs_state(struct pipe_context
*pctx
,
299 const struct pipe_shader_state
*shader
)
301 struct nir_shader
*nir
;
302 if (shader
->type
!= PIPE_SHADER_IR_NIR
)
303 nir
= zink_tgsi_to_nir(pctx
->screen
, shader
->tokens
);
305 nir
= (struct nir_shader
*)shader
->ir
.nir
;
307 return zink_compile_nir(zink_screen(pctx
->screen
), nir
);
311 bind_stage(struct zink_context
*ctx
, enum pipe_shader_type stage
,
312 struct zink_shader
*shader
)
314 assert(stage
< PIPE_SHADER_COMPUTE
);
315 ctx
->gfx_stages
[stage
] = shader
;
316 ctx
->dirty_program
= true;
320 zink_bind_vs_state(struct pipe_context
*pctx
,
323 bind_stage(zink_context(pctx
), PIPE_SHADER_VERTEX
, cso
);
327 zink_delete_vs_state(struct pipe_context
*pctx
,
330 zink_shader_free(zink_screen(pctx
->screen
), cso
);
334 zink_create_fs_state(struct pipe_context
*pctx
,
335 const struct pipe_shader_state
*shader
)
337 struct nir_shader
*nir
;
338 if (shader
->type
!= PIPE_SHADER_IR_NIR
)
339 nir
= zink_tgsi_to_nir(pctx
->screen
, shader
->tokens
);
341 nir
= (struct nir_shader
*)shader
->ir
.nir
;
343 return zink_compile_nir(zink_screen(pctx
->screen
), nir
);
347 zink_bind_fs_state(struct pipe_context
*pctx
,
350 bind_stage(zink_context(pctx
), PIPE_SHADER_FRAGMENT
, cso
);
354 zink_delete_fs_state(struct pipe_context
*pctx
,
357 zink_shader_free(zink_screen(pctx
->screen
), cso
);
/* Polygon stipple is not implemented yet; intentionally a no-op. */
static void
zink_set_polygon_stipple(struct pipe_context *pctx,
                         const struct pipe_poly_stipple *ps)
{
}
367 zink_set_vertex_buffers(struct pipe_context
*pctx
,
369 unsigned num_buffers
,
370 const struct pipe_vertex_buffer
*buffers
)
372 struct zink_context
*ctx
= zink_context(pctx
);
375 for (int i
= 0; i
< num_buffers
; ++i
) {
376 const struct pipe_vertex_buffer
*vb
= buffers
+ i
;
377 ctx
->gfx_pipeline_state
.bindings
[start_slot
+ i
].stride
= vb
->stride
;
381 util_set_vertex_buffers_mask(ctx
->buffers
, &ctx
->buffers_enabled_mask
,
382 buffers
, start_slot
, num_buffers
);
386 zink_set_viewport_states(struct pipe_context
*pctx
,
388 unsigned num_viewports
,
389 const struct pipe_viewport_state
*state
)
391 struct zink_context
*ctx
= zink_context(pctx
);
393 for (unsigned i
= 0; i
< num_viewports
; ++i
) {
394 VkViewport viewport
= {
395 state
[i
].translate
[0] - state
[i
].scale
[0],
396 state
[i
].translate
[1] - state
[i
].scale
[1],
397 state
[i
].scale
[0] * 2,
398 state
[i
].scale
[1] * 2,
399 state
[i
].translate
[2] - state
[i
].scale
[2],
400 state
[i
].translate
[2] + state
[i
].scale
[2]
402 ctx
->viewport_states
[start_slot
+ i
] = state
[i
];
403 ctx
->viewports
[start_slot
+ i
] = viewport
;
405 ctx
->num_viewports
= start_slot
+ num_viewports
;
409 zink_set_scissor_states(struct pipe_context
*pctx
,
410 unsigned start_slot
, unsigned num_scissors
,
411 const struct pipe_scissor_state
*states
)
413 struct zink_context
*ctx
= zink_context(pctx
);
415 for (unsigned i
= 0; i
< num_scissors
; i
++) {
418 scissor
.offset
.x
= states
[i
].minx
;
419 scissor
.offset
.y
= states
[i
].miny
;
420 scissor
.extent
.width
= states
[i
].maxx
- states
[i
].minx
;
421 scissor
.extent
.height
= states
[i
].maxy
- states
[i
].miny
;
422 ctx
->scissor_states
[start_slot
+ i
] = states
[i
];
423 ctx
->scissors
[start_slot
+ i
] = scissor
;
428 zink_set_constant_buffer(struct pipe_context
*pctx
,
429 enum pipe_shader_type shader
, uint index
,
430 const struct pipe_constant_buffer
*cb
)
432 struct zink_context
*ctx
= zink_context(pctx
);
435 struct pipe_resource
*buffer
= cb
->buffer
;
436 unsigned offset
= cb
->buffer_offset
;
437 if (cb
->user_buffer
) {
438 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
439 u_upload_data(ctx
->base
.const_uploader
, 0, cb
->buffer_size
,
440 screen
->props
.limits
.minUniformBufferOffsetAlignment
,
441 cb
->user_buffer
, &offset
, &buffer
);
444 pipe_resource_reference(&ctx
->ubos
[shader
][index
].buffer
, buffer
);
445 ctx
->ubos
[shader
][index
].buffer_offset
= offset
;
446 ctx
->ubos
[shader
][index
].buffer_size
= cb
->buffer_size
;
447 ctx
->ubos
[shader
][index
].user_buffer
= NULL
;
450 pipe_resource_reference(&buffer
, NULL
);
452 pipe_resource_reference(&ctx
->ubos
[shader
][index
].buffer
, NULL
);
453 ctx
->ubos
[shader
][index
].buffer_offset
= 0;
454 ctx
->ubos
[shader
][index
].buffer_size
= 0;
455 ctx
->ubos
[shader
][index
].user_buffer
= NULL
;
460 zink_set_sampler_views(struct pipe_context
*pctx
,
461 enum pipe_shader_type shader_type
,
464 struct pipe_sampler_view
**views
)
466 struct zink_context
*ctx
= zink_context(pctx
);
468 for (unsigned i
= 0; i
< num_views
; ++i
) {
469 pipe_sampler_view_reference(
470 &ctx
->image_views
[shader_type
][start_slot
+ i
],
473 ctx
->num_image_views
[shader_type
] = start_slot
+ num_views
;
477 zink_set_stencil_ref(struct pipe_context
*pctx
,
478 const struct pipe_stencil_ref
*ref
)
480 struct zink_context
*ctx
= zink_context(pctx
);
481 ctx
->stencil_ref
= *ref
;
/* User clip planes are not implemented yet; intentionally a no-op. */
static void
zink_set_clip_state(struct pipe_context *pctx,
                    const struct pipe_clip_state *pcs)
{
}
490 static struct zink_render_pass
*
491 get_render_pass(struct zink_context
*ctx
)
493 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
494 const struct pipe_framebuffer_state
*fb
= &ctx
->fb_state
;
495 struct zink_render_pass_state state
;
497 for (int i
= 0; i
< fb
->nr_cbufs
; i
++) {
498 struct pipe_resource
*res
= fb
->cbufs
[i
]->texture
;
499 state
.rts
[i
].format
= zink_get_format(screen
, fb
->cbufs
[i
]->format
);
500 state
.rts
[i
].samples
= res
->nr_samples
> 0 ? res
->nr_samples
:
501 VK_SAMPLE_COUNT_1_BIT
;
503 state
.num_cbufs
= fb
->nr_cbufs
;
506 struct zink_resource
*zsbuf
= zink_resource(fb
->zsbuf
->texture
);
507 state
.rts
[fb
->nr_cbufs
].format
= zsbuf
->format
;
508 state
.rts
[fb
->nr_cbufs
].samples
= zsbuf
->base
.nr_samples
> 0 ? zsbuf
->base
.nr_samples
: VK_SAMPLE_COUNT_1_BIT
;
510 state
.have_zsbuf
= fb
->zsbuf
!= NULL
;
512 struct hash_entry
*entry
= _mesa_hash_table_search(ctx
->render_pass_cache
,
515 struct zink_render_pass
*rp
;
516 rp
= zink_create_render_pass(screen
, &state
);
517 entry
= _mesa_hash_table_insert(ctx
->render_pass_cache
, &state
, rp
);
525 static struct zink_framebuffer
*
526 get_framebuffer(struct zink_context
*ctx
)
528 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
530 struct zink_framebuffer_state state
= {};
531 state
.rp
= get_render_pass(ctx
);
532 for (int i
= 0; i
< ctx
->fb_state
.nr_cbufs
; i
++) {
533 struct pipe_surface
*psurf
= ctx
->fb_state
.cbufs
[i
];
534 state
.attachments
[i
] = zink_surface(psurf
);
537 state
.num_attachments
= ctx
->fb_state
.nr_cbufs
;
538 if (ctx
->fb_state
.zsbuf
) {
539 struct pipe_surface
*psurf
= ctx
->fb_state
.zsbuf
;
540 state
.attachments
[state
.num_attachments
++] = zink_surface(psurf
);
543 state
.width
= ctx
->fb_state
.width
;
544 state
.height
= ctx
->fb_state
.height
;
545 state
.layers
= MAX2(ctx
->fb_state
.layers
, 1);
547 struct hash_entry
*entry
= _mesa_hash_table_search(ctx
->framebuffer_cache
,
550 struct zink_framebuffer
*fb
= zink_create_framebuffer(screen
, &state
);
551 entry
= _mesa_hash_table_insert(ctx
->framebuffer_cache
, &state
, fb
);
560 zink_begin_render_pass(struct zink_context
*ctx
, struct zink_batch
*batch
)
562 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
563 assert(batch
== zink_curr_batch(ctx
));
564 assert(ctx
->gfx_pipeline_state
.render_pass
);
566 struct pipe_framebuffer_state
*fb_state
= &ctx
->fb_state
;
568 VkRenderPassBeginInfo rpbi
= {};
569 rpbi
.sType
= VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
;
570 rpbi
.renderPass
= ctx
->gfx_pipeline_state
.render_pass
->render_pass
;
571 rpbi
.renderArea
.offset
.x
= 0;
572 rpbi
.renderArea
.offset
.y
= 0;
573 rpbi
.renderArea
.extent
.width
= fb_state
->width
;
574 rpbi
.renderArea
.extent
.height
= fb_state
->height
;
575 rpbi
.clearValueCount
= 0;
576 rpbi
.pClearValues
= NULL
;
577 rpbi
.framebuffer
= ctx
->framebuffer
->fb
;
579 assert(ctx
->gfx_pipeline_state
.render_pass
&& ctx
->framebuffer
);
580 assert(!batch
->rp
|| batch
->rp
== ctx
->gfx_pipeline_state
.render_pass
);
581 assert(!batch
->fb
|| batch
->fb
== ctx
->framebuffer
);
583 for (int i
= 0; i
< fb_state
->nr_cbufs
; i
++) {
584 struct zink_resource
*res
= zink_resource(fb_state
->cbufs
[i
]->texture
);
585 if (res
->layout
!= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
)
586 zink_resource_barrier(batch
->cmdbuf
, res
, res
->aspect
,
587 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
);
590 if (fb_state
->zsbuf
) {
591 struct zink_resource
*res
= zink_resource(fb_state
->zsbuf
->texture
);
592 if (res
->layout
!= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
)
593 zink_resource_barrier(batch
->cmdbuf
, res
, res
->aspect
,
594 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
);
597 zink_render_pass_reference(screen
, &batch
->rp
, ctx
->gfx_pipeline_state
.render_pass
);
598 zink_framebuffer_reference(screen
, &batch
->fb
, ctx
->framebuffer
);
600 vkCmdBeginRenderPass(batch
->cmdbuf
, &rpbi
, VK_SUBPASS_CONTENTS_INLINE
);
604 flush_batch(struct zink_context
*ctx
)
606 struct zink_batch
*batch
= zink_curr_batch(ctx
);
608 vkCmdEndRenderPass(batch
->cmdbuf
);
610 zink_end_batch(ctx
, batch
);
613 if (ctx
->curr_batch
== ARRAY_SIZE(ctx
->batches
))
616 zink_start_batch(ctx
, zink_curr_batch(ctx
));
620 zink_batch_rp(struct zink_context
*ctx
)
622 struct zink_batch
*batch
= zink_curr_batch(ctx
);
624 zink_begin_render_pass(ctx
, batch
);
631 zink_batch_no_rp(struct zink_context
*ctx
)
633 struct zink_batch
*batch
= zink_curr_batch(ctx
);
635 /* flush batch and get a new one */
637 batch
= zink_curr_batch(ctx
);
644 zink_set_framebuffer_state(struct pipe_context
*pctx
,
645 const struct pipe_framebuffer_state
*state
)
647 struct zink_context
*ctx
= zink_context(pctx
);
648 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
650 VkSampleCountFlagBits rast_samples
= VK_SAMPLE_COUNT_1_BIT
;
651 for (int i
= 0; i
< state
->nr_cbufs
; i
++)
652 rast_samples
= MAX2(rast_samples
, state
->cbufs
[i
]->texture
->nr_samples
);
653 if (state
->zsbuf
&& state
->zsbuf
->texture
->nr_samples
)
654 rast_samples
= MAX2(rast_samples
, state
->zsbuf
->texture
->nr_samples
);
656 util_copy_framebuffer_state(&ctx
->fb_state
, state
);
658 struct zink_framebuffer
*fb
= get_framebuffer(ctx
);
659 zink_framebuffer_reference(screen
, &ctx
->framebuffer
, fb
);
660 zink_render_pass_reference(screen
, &ctx
->gfx_pipeline_state
.render_pass
, fb
->rp
);
662 ctx
->gfx_pipeline_state
.rast_samples
= rast_samples
;
663 ctx
->gfx_pipeline_state
.num_attachments
= state
->nr_cbufs
;
665 struct zink_batch
*batch
= zink_batch_no_rp(ctx
);
667 for (int i
= 0; i
< state
->nr_cbufs
; i
++) {
668 struct zink_resource
*res
= zink_resource(state
->cbufs
[i
]->texture
);
669 if (res
->layout
!= VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
)
670 zink_resource_barrier(batch
->cmdbuf
, res
, res
->aspect
,
671 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
);
675 struct zink_resource
*res
= zink_resource(state
->zsbuf
->texture
);
676 if (res
->layout
!= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
)
677 zink_resource_barrier(batch
->cmdbuf
, res
, res
->aspect
,
678 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
);
683 zink_set_blend_color(struct pipe_context
*pctx
,
684 const struct pipe_blend_color
*color
)
686 struct zink_context
*ctx
= zink_context(pctx
);
687 memcpy(ctx
->blend_constants
, color
->color
, sizeof(float) * 4);
691 zink_set_sample_mask(struct pipe_context
*pctx
, unsigned sample_mask
)
693 struct zink_context
*ctx
= zink_context(pctx
);
694 ctx
->gfx_pipeline_state
.sample_mask
= sample_mask
;
698 access_src_flags(VkImageLayout layout
)
701 case VK_IMAGE_LAYOUT_UNDEFINED
:
702 case VK_IMAGE_LAYOUT_GENERAL
:
705 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
:
706 return VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
;
707 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
:
708 return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
;
710 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
:
711 return VK_ACCESS_SHADER_READ_BIT
;
713 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
:
714 return VK_ACCESS_TRANSFER_READ_BIT
;
716 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
:
717 return VK_ACCESS_TRANSFER_WRITE_BIT
;
719 case VK_IMAGE_LAYOUT_PREINITIALIZED
:
720 return VK_ACCESS_HOST_WRITE_BIT
;
723 unreachable("unexpected layout");
728 access_dst_flags(VkImageLayout layout
)
731 case VK_IMAGE_LAYOUT_UNDEFINED
:
732 case VK_IMAGE_LAYOUT_GENERAL
:
735 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
:
736 return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
;
737 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
:
738 return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
;
740 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
:
741 return VK_ACCESS_TRANSFER_READ_BIT
;
743 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
:
744 return VK_ACCESS_TRANSFER_WRITE_BIT
;
747 unreachable("unexpected layout");
751 static VkPipelineStageFlags
752 pipeline_dst_stage(VkImageLayout layout
)
755 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
:
756 return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
;
757 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
:
758 return VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
;
760 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
:
761 return VK_PIPELINE_STAGE_TRANSFER_BIT
;
762 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
:
763 return VK_PIPELINE_STAGE_TRANSFER_BIT
;
766 return VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
;
770 static VkPipelineStageFlags
771 pipeline_src_stage(VkImageLayout layout
)
774 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
:
775 return VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
;
776 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
:
777 return VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
;
779 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
:
780 return VK_PIPELINE_STAGE_TRANSFER_BIT
;
781 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
:
782 return VK_PIPELINE_STAGE_TRANSFER_BIT
;
785 return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
;
791 zink_resource_barrier(VkCommandBuffer cmdbuf
, struct zink_resource
*res
,
792 VkImageAspectFlags aspect
, VkImageLayout new_layout
)
794 VkImageSubresourceRange isr
= {
796 0, VK_REMAINING_MIP_LEVELS
,
797 0, VK_REMAINING_ARRAY_LAYERS
800 VkImageMemoryBarrier imb
= {
801 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER
,
803 access_src_flags(res
->layout
),
804 access_dst_flags(new_layout
),
807 VK_QUEUE_FAMILY_IGNORED
,
808 VK_QUEUE_FAMILY_IGNORED
,
812 vkCmdPipelineBarrier(
814 pipeline_src_stage(res
->layout
),
815 pipeline_dst_stage(new_layout
),
822 res
->layout
= new_layout
;
826 zink_clear(struct pipe_context
*pctx
,
828 const union pipe_color_union
*pcolor
,
829 double depth
, unsigned stencil
)
831 struct zink_context
*ctx
= zink_context(pctx
);
832 struct pipe_framebuffer_state
*fb
= &ctx
->fb_state
;
834 /* FIXME: this is very inefficient; if no renderpass has been started yet,
835 * we should record the clear if it's full-screen, and apply it as we
836 * start the render-pass. Otherwise we can do a partial out-of-renderpass
839 struct zink_batch
*batch
= zink_batch_rp(ctx
);
841 VkClearAttachment attachments
[1 + PIPE_MAX_COLOR_BUFS
];
842 int num_attachments
= 0;
844 if (buffers
& PIPE_CLEAR_COLOR
) {
845 VkClearColorValue color
;
846 color
.float32
[0] = pcolor
->f
[0];
847 color
.float32
[1] = pcolor
->f
[1];
848 color
.float32
[2] = pcolor
->f
[2];
849 color
.float32
[3] = pcolor
->f
[3];
851 for (unsigned i
= 0; i
< fb
->nr_cbufs
; i
++) {
852 if (!(buffers
& (PIPE_CLEAR_COLOR0
<< i
)) || !fb
->cbufs
[i
])
855 attachments
[num_attachments
].aspectMask
= VK_IMAGE_ASPECT_COLOR_BIT
;
856 attachments
[num_attachments
].colorAttachment
= i
;
857 attachments
[num_attachments
].clearValue
.color
= color
;
862 if (buffers
& PIPE_CLEAR_DEPTHSTENCIL
&& fb
->zsbuf
) {
863 VkImageAspectFlags aspect
= 0;
864 if (buffers
& PIPE_CLEAR_DEPTH
)
865 aspect
|= VK_IMAGE_ASPECT_DEPTH_BIT
;
866 if (buffers
& PIPE_CLEAR_STENCIL
)
867 aspect
|= VK_IMAGE_ASPECT_STENCIL_BIT
;
869 attachments
[num_attachments
].aspectMask
= aspect
;
870 attachments
[num_attachments
].clearValue
.depthStencil
.depth
= depth
;
871 attachments
[num_attachments
].clearValue
.depthStencil
.stencil
= stencil
;
876 cr
.rect
.offset
.x
= 0;
877 cr
.rect
.offset
.y
= 0;
878 cr
.rect
.extent
.width
= fb
->width
;
879 cr
.rect
.extent
.height
= fb
->height
;
880 cr
.baseArrayLayer
= 0;
881 cr
.layerCount
= util_framebuffer_get_num_layers(fb
);
882 vkCmdClearAttachments(batch
->cmdbuf
, num_attachments
, attachments
, 1, &cr
);
885 VkShaderStageFlagBits
886 zink_shader_stage(enum pipe_shader_type type
)
888 VkShaderStageFlagBits stages
[] = {
889 [PIPE_SHADER_VERTEX
] = VK_SHADER_STAGE_VERTEX_BIT
,
890 [PIPE_SHADER_FRAGMENT
] = VK_SHADER_STAGE_FRAGMENT_BIT
,
891 [PIPE_SHADER_GEOMETRY
] = VK_SHADER_STAGE_GEOMETRY_BIT
,
892 [PIPE_SHADER_TESS_CTRL
] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT
,
893 [PIPE_SHADER_TESS_EVAL
] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT
,
894 [PIPE_SHADER_COMPUTE
] = VK_SHADER_STAGE_COMPUTE_BIT
,
899 static VkDescriptorSet
900 allocate_descriptor_set(struct zink_screen
*screen
,
901 struct zink_batch
*batch
,
902 struct zink_gfx_program
*prog
)
904 assert(batch
->descs_left
>= prog
->num_descriptors
);
905 VkDescriptorSetAllocateInfo dsai
;
906 memset((void *)&dsai
, 0, sizeof(dsai
));
907 dsai
.sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
;
909 dsai
.descriptorPool
= batch
->descpool
;
910 dsai
.descriptorSetCount
= 1;
911 dsai
.pSetLayouts
= &prog
->dsl
;
913 VkDescriptorSet desc_set
;
914 if (vkAllocateDescriptorSets(screen
->dev
, &dsai
, &desc_set
) != VK_SUCCESS
) {
915 debug_printf("ZINK: failed to allocate descriptor set :/");
916 return VK_NULL_HANDLE
;
919 batch
->descs_left
-= prog
->num_descriptors
;
924 zink_bind_vertex_buffers(struct zink_batch
*batch
, struct zink_context
*ctx
)
926 VkBuffer buffers
[PIPE_MAX_ATTRIBS
];
927 VkDeviceSize buffer_offsets
[PIPE_MAX_ATTRIBS
];
928 const struct zink_vertex_elements_state
*elems
= ctx
->element_state
;
929 for (unsigned i
= 0; i
< elems
->hw_state
.num_bindings
; i
++) {
930 struct pipe_vertex_buffer
*vb
= ctx
->buffers
+ ctx
->element_state
->binding_map
[i
];
931 assert(vb
&& vb
->buffer
.resource
);
932 struct zink_resource
*res
= zink_resource(vb
->buffer
.resource
);
933 buffers
[i
] = res
->buffer
;
934 buffer_offsets
[i
] = vb
->buffer_offset
;
935 zink_batch_reference_resoure(batch
, res
);
938 if (elems
->hw_state
.num_bindings
> 0)
939 vkCmdBindVertexBuffers(batch
->cmdbuf
, 0,
940 elems
->hw_state
.num_bindings
,
941 buffers
, buffer_offsets
);
945 hash_gfx_program(const void *key
)
947 return _mesa_hash_data(key
, sizeof(struct zink_shader
*) * (PIPE_SHADER_TYPES
- 1));
951 equals_gfx_program(const void *a
, const void *b
)
953 return memcmp(a
, b
, sizeof(struct zink_shader
*) * (PIPE_SHADER_TYPES
- 1)) == 0;
957 hash_render_pass_state(const void *key
)
959 return _mesa_hash_data(key
, sizeof(struct zink_render_pass_state
));
963 equals_render_pass_state(const void *a
, const void *b
)
965 return memcmp(a
, b
, sizeof(struct zink_render_pass_state
)) == 0;
969 hash_framebuffer_state(const void *key
)
971 struct zink_framebuffer_state
*s
= (struct zink_framebuffer_state
*)key
;
972 return _mesa_hash_data(key
, sizeof(struct zink_framebuffer_state
) + sizeof(s
->attachments
) * s
->num_attachments
);
976 equals_framebuffer_state(const void *a
, const void *b
)
978 struct zink_framebuffer_state
*s
= (struct zink_framebuffer_state
*)a
;
979 return memcmp(a
, b
, sizeof(struct zink_framebuffer_state
) + sizeof(s
->attachments
) * s
->num_attachments
) == 0;
982 static struct zink_gfx_program
*
983 get_gfx_program(struct zink_context
*ctx
)
985 if (ctx
->dirty_program
) {
986 struct hash_entry
*entry
= _mesa_hash_table_search(ctx
->program_cache
,
989 struct zink_gfx_program
*prog
;
990 prog
= zink_create_gfx_program(zink_screen(ctx
->base
.screen
),
992 entry
= _mesa_hash_table_insert(ctx
->program_cache
, prog
->stages
, prog
);
996 ctx
->curr_program
= entry
->data
;
997 ctx
->dirty_program
= false;
1000 assert(ctx
->curr_program
);
1001 return ctx
->curr_program
;
1005 line_width_needed(enum pipe_prim_type reduced_prim
,
1006 VkPolygonMode polygon_mode
)
1008 switch (reduced_prim
) {
1009 case PIPE_PRIM_POINTS
:
1012 case PIPE_PRIM_LINES
:
1015 case PIPE_PRIM_TRIANGLES
:
1016 return polygon_mode
== VK_POLYGON_MODE_LINE
;
1019 unreachable("unexpected reduced prim");
1024 zink_draw_vbo(struct pipe_context
*pctx
,
1025 const struct pipe_draw_info
*dinfo
)
1027 struct zink_context
*ctx
= zink_context(pctx
);
1028 struct zink_screen
*screen
= zink_screen(pctx
->screen
);
1029 struct zink_rasterizer_state
*rast_state
= ctx
->rast_state
;
1031 if (dinfo
->mode
>= PIPE_PRIM_QUADS
||
1032 dinfo
->mode
== PIPE_PRIM_LINE_LOOP
||
1033 dinfo
->index_size
== 1) {
1034 if (!u_trim_pipe_prim(dinfo
->mode
, (unsigned *)&dinfo
->count
))
1037 util_primconvert_save_rasterizer_state(ctx
->primconvert
, &rast_state
->base
);
1038 util_primconvert_draw_vbo(ctx
->primconvert
, dinfo
);
1042 struct zink_gfx_program
*gfx_program
= get_gfx_program(ctx
);
1046 VkPipeline pipeline
= zink_get_gfx_pipeline(screen
, gfx_program
,
1047 &ctx
->gfx_pipeline_state
,
1050 enum pipe_prim_type reduced_prim
= u_reduced_prim(dinfo
->mode
);
1052 bool depth_bias
= false;
1053 switch (reduced_prim
) {
1054 case PIPE_PRIM_POINTS
:
1055 depth_bias
= rast_state
->offset_point
;
1058 case PIPE_PRIM_LINES
:
1059 depth_bias
= rast_state
->offset_line
;
1062 case PIPE_PRIM_TRIANGLES
:
1063 depth_bias
= rast_state
->offset_tri
;
1067 unreachable("unexpected reduced prim");
1070 unsigned index_offset
= 0;
1071 struct pipe_resource
*index_buffer
= NULL
;
1072 if (dinfo
->index_size
> 0) {
1073 if (dinfo
->has_user_indices
) {
1074 if (!util_upload_index_buffer(pctx
, dinfo
, &index_buffer
, &index_offset
)) {
1075 debug_printf("util_upload_index_buffer() failed\n");
1079 index_buffer
= dinfo
->index
.resource
;
1082 VkWriteDescriptorSet wds
[PIPE_SHADER_TYPES
* PIPE_MAX_CONSTANT_BUFFERS
+ PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
1083 VkDescriptorBufferInfo buffer_infos
[PIPE_SHADER_TYPES
* PIPE_MAX_CONSTANT_BUFFERS
];
1084 VkDescriptorImageInfo image_infos
[PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
1085 int num_wds
= 0, num_buffer_info
= 0, num_image_info
= 0;
1087 struct zink_resource
*transitions
[PIPE_SHADER_TYPES
* PIPE_MAX_SHADER_SAMPLER_VIEWS
];
1088 int num_transitions
= 0;
1090 for (int i
= 0; i
< ARRAY_SIZE(ctx
->gfx_stages
); i
++) {
1091 struct zink_shader
*shader
= ctx
->gfx_stages
[i
];
1095 for (int j
= 0; j
< shader
->num_bindings
; j
++) {
1096 int index
= shader
->bindings
[j
].index
;
1097 if (shader
->bindings
[j
].type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
) {
1098 assert(ctx
->ubos
[i
][index
].buffer_size
> 0);
1099 assert(ctx
->ubos
[i
][index
].buffer_size
<= screen
->props
.limits
.maxUniformBufferRange
);
1100 assert(ctx
->ubos
[i
][index
].buffer
);
1101 struct zink_resource
*res
= zink_resource(ctx
->ubos
[i
][index
].buffer
);
1102 buffer_infos
[num_buffer_info
].buffer
= res
->buffer
;
1103 buffer_infos
[num_buffer_info
].offset
= ctx
->ubos
[i
][index
].buffer_offset
;
1104 buffer_infos
[num_buffer_info
].range
= ctx
->ubos
[i
][index
].buffer_size
;
1105 wds
[num_wds
].pBufferInfo
= buffer_infos
+ num_buffer_info
;
1108 struct pipe_sampler_view
*psampler_view
= ctx
->image_views
[i
][index
];
1109 assert(psampler_view
);
1110 struct zink_sampler_view
*sampler_view
= zink_sampler_view(psampler_view
);
1112 struct zink_resource
*res
= zink_resource(psampler_view
->texture
);
1113 VkImageLayout layout
= res
->layout
;
1114 if (layout
!= VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
&&
1115 layout
!= VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
&&
1116 layout
!= VK_IMAGE_LAYOUT_GENERAL
) {
1117 transitions
[num_transitions
++] = res
;
1118 layout
= VK_IMAGE_LAYOUT_GENERAL
;
1120 image_infos
[num_image_info
].imageLayout
= layout
;
1121 image_infos
[num_image_info
].imageView
= sampler_view
->image_view
;
1122 image_infos
[num_image_info
].sampler
= ctx
->samplers
[i
][index
];
1123 wds
[num_wds
].pImageInfo
= image_infos
+ num_image_info
;
1127 wds
[num_wds
].sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
;
1128 wds
[num_wds
].pNext
= NULL
;
1129 wds
[num_wds
].dstBinding
= shader
->bindings
[j
].binding
;
1130 wds
[num_wds
].dstArrayElement
= 0;
1131 wds
[num_wds
].descriptorCount
= 1;
1132 wds
[num_wds
].descriptorType
= shader
->bindings
[j
].type
;
1137 struct zink_batch
*batch
;
1138 if (num_transitions
> 0) {
1139 batch
= zink_batch_no_rp(ctx
);
1141 for (int i
= 0; i
< num_transitions
; ++i
)
1142 zink_resource_barrier(batch
->cmdbuf
, transitions
[i
],
1143 transitions
[i
]->aspect
,
1144 VK_IMAGE_LAYOUT_GENERAL
);
1147 batch
= zink_batch_rp(ctx
);
1149 if (batch
->descs_left
< gfx_program
->num_descriptors
) {
1151 batch
= zink_batch_rp(ctx
);
1152 assert(batch
->descs_left
>= gfx_program
->num_descriptors
);
1155 VkDescriptorSet desc_set
= allocate_descriptor_set(screen
, batch
,
1157 assert(desc_set
!= VK_NULL_HANDLE
);
1159 for (int i
= 0; i
< ARRAY_SIZE(ctx
->gfx_stages
); i
++) {
1160 struct zink_shader
*shader
= ctx
->gfx_stages
[i
];
1164 for (int j
= 0; j
< shader
->num_bindings
; j
++) {
1165 int index
= shader
->bindings
[j
].index
;
1166 if (shader
->bindings
[j
].type
== VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
) {
1167 struct zink_resource
*res
= zink_resource(ctx
->ubos
[i
][index
].buffer
);
1168 zink_batch_reference_resoure(batch
, res
);
1170 struct zink_sampler_view
*sampler_view
= zink_sampler_view(ctx
->image_views
[i
][index
]);
1171 zink_batch_reference_sampler_view(batch
, sampler_view
);
1176 vkCmdSetViewport(batch
->cmdbuf
, 0, ctx
->num_viewports
, ctx
->viewports
);
1177 if (ctx
->rast_state
->base
.scissor
)
1178 vkCmdSetScissor(batch
->cmdbuf
, 0, ctx
->num_viewports
, ctx
->scissors
);
1179 else if (ctx
->fb_state
.width
&& ctx
->fb_state
.height
) {
1180 VkRect2D fb_scissor
= {};
1181 fb_scissor
.extent
.width
= ctx
->fb_state
.width
;
1182 fb_scissor
.extent
.height
= ctx
->fb_state
.height
;
1183 vkCmdSetScissor(batch
->cmdbuf
, 0, 1, &fb_scissor
);
1186 if (line_width_needed(reduced_prim
, rast_state
->hw_state
.polygon_mode
)) {
1187 if (screen
->feats
.wideLines
|| ctx
->line_width
== 1.0f
)
1188 vkCmdSetLineWidth(batch
->cmdbuf
, ctx
->line_width
);
1190 debug_printf("BUG: wide lines not supported, needs fallback!");
1193 vkCmdSetStencilReference(batch
->cmdbuf
, VK_STENCIL_FACE_FRONT_BIT
, ctx
->stencil_ref
.ref_value
[0]);
1194 vkCmdSetStencilReference(batch
->cmdbuf
, VK_STENCIL_FACE_BACK_BIT
, ctx
->stencil_ref
.ref_value
[1]);
1197 vkCmdSetDepthBias(batch
->cmdbuf
, rast_state
->offset_units
, rast_state
->offset_clamp
, rast_state
->offset_scale
);
1199 vkCmdSetDepthBias(batch
->cmdbuf
, 0.0f
, 0.0f
, 0.0f
);
1201 if (ctx
->gfx_pipeline_state
.blend_state
->need_blend_constants
)
1202 vkCmdSetBlendConstants(batch
->cmdbuf
, ctx
->blend_constants
);
1205 for (int i
= 0; i
< num_wds
; ++i
)
1206 wds
[i
].dstSet
= desc_set
;
1207 vkUpdateDescriptorSets(screen
->dev
, num_wds
, wds
, 0, NULL
);
1210 vkCmdBindPipeline(batch
->cmdbuf
, VK_PIPELINE_BIND_POINT_GRAPHICS
, pipeline
);
1211 vkCmdBindDescriptorSets(batch
->cmdbuf
, VK_PIPELINE_BIND_POINT_GRAPHICS
,
1212 gfx_program
->layout
, 0, 1, &desc_set
, 0, NULL
);
1213 zink_bind_vertex_buffers(batch
, ctx
);
1215 if (dinfo
->index_size
> 0) {
1216 assert(dinfo
->index_size
!= 1);
1217 VkIndexType index_type
= dinfo
->index_size
== 2 ? VK_INDEX_TYPE_UINT16
: VK_INDEX_TYPE_UINT32
;
1218 struct zink_resource
*res
= zink_resource(index_buffer
);
1219 vkCmdBindIndexBuffer(batch
->cmdbuf
, res
->buffer
, index_offset
, index_type
);
1220 zink_batch_reference_resoure(batch
, res
);
1221 vkCmdDrawIndexed(batch
->cmdbuf
,
1222 dinfo
->count
, dinfo
->instance_count
,
1223 dinfo
->start
, dinfo
->index_bias
, dinfo
->start_instance
);
1225 vkCmdDraw(batch
->cmdbuf
, dinfo
->count
, dinfo
->instance_count
, dinfo
->start
, dinfo
->start_instance
);
1227 if (dinfo
->index_size
> 0 && dinfo
->has_user_indices
)
1228 pipe_resource_reference(&index_buffer
, NULL
);
1232 zink_flush(struct pipe_context
*pctx
,
1233 struct pipe_fence_handle
**pfence
,
1234 enum pipe_flush_flags flags
)
1236 struct zink_context
*ctx
= zink_context(pctx
);
1238 struct zink_batch
*batch
= zink_curr_batch(ctx
);
1242 zink_fence_reference(zink_screen(pctx
->screen
),
1243 (struct zink_fence
**)pfence
,
1247 * For some strange reason, we need to finish before presenting, or else
1248 * we start rendering on top of the back-buffer for the next frame. This
1249 * seems like a bug in the DRI-driver to me, because we really should
1250 * be properly protected by fences here, and the back-buffer should
1251 * either be swapped with the front-buffer, or blitted from. But for
1252 * some strange reason, neither of these things happen.
1254 if (flags
& PIPE_FLUSH_END_OF_FRAME
)
1255 pctx
->screen
->fence_finish(pctx
->screen
, pctx
,
1256 (struct pipe_fence_handle
*)batch
->fence
,
1257 PIPE_TIMEOUT_INFINITE
);
1261 blit_resolve(struct zink_context
*ctx
, const struct pipe_blit_info
*info
)
1263 if (info
->mask
!= PIPE_MASK_RGBA
||
1264 info
->scissor_enable
||
1268 struct zink_resource
*src
= zink_resource(info
->src
.resource
);
1269 struct zink_resource
*dst
= zink_resource(info
->dst
.resource
);
1271 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
1272 if (src
->format
!= zink_get_format(screen
, info
->src
.format
) ||
1273 dst
->format
!= zink_get_format(screen
, info
->dst
.format
))
1276 struct zink_batch
*batch
= zink_batch_no_rp(ctx
);
1278 zink_batch_reference_resoure(batch
, src
);
1279 zink_batch_reference_resoure(batch
, dst
);
1281 if (src
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
)
1282 zink_resource_barrier(batch
->cmdbuf
, src
, src
->aspect
,
1283 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
);
1285 if (dst
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
)
1286 zink_resource_barrier(batch
->cmdbuf
, dst
, dst
->aspect
,
1287 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
);
1289 VkImageResolve region
= {};
1291 region
.srcSubresource
.aspectMask
= src
->aspect
;
1292 region
.srcSubresource
.mipLevel
= info
->src
.level
;
1293 region
.srcSubresource
.baseArrayLayer
= 0; // no clue
1294 region
.srcSubresource
.layerCount
= 1; // no clue
1295 region
.srcOffset
.x
= info
->src
.box
.x
;
1296 region
.srcOffset
.y
= info
->src
.box
.y
;
1297 region
.srcOffset
.z
= info
->src
.box
.z
;
1299 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1300 region
.dstSubresource
.mipLevel
= info
->dst
.level
;
1301 region
.dstSubresource
.baseArrayLayer
= 0; // no clue
1302 region
.dstSubresource
.layerCount
= 1; // no clue
1303 region
.dstOffset
.x
= info
->dst
.box
.x
;
1304 region
.dstOffset
.y
= info
->dst
.box
.y
;
1305 region
.dstOffset
.z
= info
->dst
.box
.z
;
1307 region
.extent
.width
= info
->dst
.box
.width
;
1308 region
.extent
.height
= info
->dst
.box
.height
;
1309 region
.extent
.depth
= info
->dst
.box
.depth
;
1310 vkCmdResolveImage(batch
->cmdbuf
, src
->image
, src
->layout
,
1311 dst
->image
, dst
->layout
,
1314 /* HACK: I have no idea why this is needed, but without it ioquake3
1315 * randomly keeps fading to black.
1323 blit_native(struct zink_context
*ctx
, const struct pipe_blit_info
*info
)
1325 if (info
->mask
!= PIPE_MASK_RGBA
||
1326 info
->scissor_enable
||
1330 struct zink_resource
*src
= zink_resource(info
->src
.resource
);
1331 struct zink_resource
*dst
= zink_resource(info
->dst
.resource
);
1333 struct zink_screen
*screen
= zink_screen(ctx
->base
.screen
);
1334 if (src
->format
!= zink_get_format(screen
, info
->src
.format
) ||
1335 dst
->format
!= zink_get_format(screen
, info
->dst
.format
))
1338 struct zink_batch
*batch
= zink_batch_no_rp(ctx
);
1339 zink_batch_reference_resoure(batch
, src
);
1340 zink_batch_reference_resoure(batch
, dst
);
1342 if (src
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
)
1343 zink_resource_barrier(batch
->cmdbuf
, src
, src
->aspect
,
1344 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
);
1346 if (dst
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
)
1347 zink_resource_barrier(batch
->cmdbuf
, dst
, dst
->aspect
,
1348 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
);
1350 VkImageBlit region
= {};
1351 region
.srcSubresource
.aspectMask
= src
->aspect
;
1352 region
.srcSubresource
.mipLevel
= info
->src
.level
;
1353 region
.srcOffsets
[0].x
= info
->src
.box
.x
;
1354 region
.srcOffsets
[0].y
= info
->src
.box
.y
;
1355 region
.srcOffsets
[1].x
= info
->src
.box
.x
+ info
->src
.box
.width
;
1356 region
.srcOffsets
[1].y
= info
->src
.box
.y
+ info
->src
.box
.height
;
1358 if (src
->base
.array_size
> 1) {
1359 region
.srcOffsets
[0].z
= 0;
1360 region
.srcOffsets
[1].z
= 1;
1361 region
.srcSubresource
.baseArrayLayer
= info
->src
.box
.z
;
1362 region
.srcSubresource
.layerCount
= info
->src
.box
.depth
;
1364 region
.srcOffsets
[0].z
= info
->src
.box
.z
;
1365 region
.srcOffsets
[1].z
= info
->src
.box
.z
+ info
->src
.box
.depth
;
1366 region
.srcSubresource
.baseArrayLayer
= 0;
1367 region
.srcSubresource
.layerCount
= 1;
1370 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1371 region
.dstSubresource
.mipLevel
= info
->dst
.level
;
1372 region
.dstOffsets
[0].x
= info
->dst
.box
.x
;
1373 region
.dstOffsets
[0].y
= info
->dst
.box
.y
;
1374 region
.dstOffsets
[1].x
= info
->dst
.box
.x
+ info
->dst
.box
.width
;
1375 region
.dstOffsets
[1].y
= info
->dst
.box
.y
+ info
->dst
.box
.height
;
1377 if (dst
->base
.array_size
> 1) {
1378 region
.dstOffsets
[0].z
= 0;
1379 region
.dstOffsets
[1].z
= 1;
1380 region
.dstSubresource
.baseArrayLayer
= info
->dst
.box
.z
;
1381 region
.dstSubresource
.layerCount
= info
->dst
.box
.depth
;
1383 region
.dstOffsets
[0].z
= info
->dst
.box
.z
;
1384 region
.dstOffsets
[1].z
= info
->dst
.box
.z
+ info
->dst
.box
.depth
;
1385 region
.dstSubresource
.baseArrayLayer
= 0;
1386 region
.dstSubresource
.layerCount
= 1;
1389 vkCmdBlitImage(batch
->cmdbuf
, src
->image
, src
->layout
,
1390 dst
->image
, dst
->layout
,
1392 filter(info
->filter
));
1394 /* HACK: I have no idea why this is needed, but without it ioquake3
1395 * randomly keeps fading to black.
1403 zink_blit(struct pipe_context
*pctx
,
1404 const struct pipe_blit_info
*info
)
1406 struct zink_context
*ctx
= zink_context(pctx
);
1407 if (info
->src
.resource
->nr_samples
> 1 &&
1408 info
->dst
.resource
->nr_samples
<= 1) {
1409 if (blit_resolve(ctx
, info
))
1412 if (blit_native(ctx
, info
))
1416 if (!util_blitter_is_blit_supported(ctx
->blitter
, info
)) {
1417 debug_printf("blit unsupported %s -> %s\n",
1418 util_format_short_name(info
->src
.resource
->format
),
1419 util_format_short_name(info
->dst
.resource
->format
));
1423 util_blitter_save_blend(ctx
->blitter
, ctx
->gfx_pipeline_state
.blend_state
);
1424 util_blitter_save_depth_stencil_alpha(ctx
->blitter
, ctx
->gfx_pipeline_state
.depth_stencil_alpha_state
);
1425 util_blitter_save_vertex_elements(ctx
->blitter
, ctx
->element_state
);
1426 util_blitter_save_stencil_ref(ctx
->blitter
, &ctx
->stencil_ref
);
1427 util_blitter_save_rasterizer(ctx
->blitter
, ctx
->rast_state
);
1428 util_blitter_save_fragment_shader(ctx
->blitter
, ctx
->gfx_stages
[PIPE_SHADER_FRAGMENT
]);
1429 util_blitter_save_vertex_shader(ctx
->blitter
, ctx
->gfx_stages
[PIPE_SHADER_VERTEX
]);
1430 util_blitter_save_framebuffer(ctx
->blitter
, &ctx
->fb_state
);
1431 util_blitter_save_viewport(ctx
->blitter
, ctx
->viewport_states
);
1432 util_blitter_save_scissor(ctx
->blitter
, ctx
->scissor_states
);
1433 util_blitter_save_fragment_sampler_states(ctx
->blitter
,
1434 ctx
->num_samplers
[PIPE_SHADER_FRAGMENT
],
1435 (void **)ctx
->samplers
[PIPE_SHADER_FRAGMENT
]);
1436 util_blitter_save_fragment_sampler_views(ctx
->blitter
,
1437 ctx
->num_image_views
[PIPE_SHADER_FRAGMENT
],
1438 ctx
->image_views
[PIPE_SHADER_FRAGMENT
]);
1439 util_blitter_save_fragment_constant_buffer_slot(ctx
->blitter
, ctx
->ubos
[PIPE_SHADER_FRAGMENT
]);
1440 util_blitter_save_vertex_buffer_slot(ctx
->blitter
, ctx
->buffers
);
1441 util_blitter_save_sample_mask(ctx
->blitter
, ctx
->gfx_pipeline_state
.sample_mask
);
1443 util_blitter_blit(ctx
->blitter
, info
);
/* pipe_context::flush_resource implementation.
 * Intentionally a no-op: zink has nothing to do here (yet).
 */
static void
zink_flush_resource(struct pipe_context *pipe,
                    struct pipe_resource *resource)
{
}
1453 zink_resource_copy_region(struct pipe_context
*pctx
,
1454 struct pipe_resource
*pdst
,
1455 unsigned dst_level
, unsigned dstx
, unsigned dsty
, unsigned dstz
,
1456 struct pipe_resource
*psrc
,
1457 unsigned src_level
, const struct pipe_box
*src_box
)
1459 struct zink_resource
*dst
= zink_resource(pdst
);
1460 struct zink_resource
*src
= zink_resource(psrc
);
1461 struct zink_context
*ctx
= zink_context(pctx
);
1462 if (dst
->base
.target
!= PIPE_BUFFER
&& src
->base
.target
!= PIPE_BUFFER
) {
1463 VkImageCopy region
= {};
1465 region
.srcSubresource
.aspectMask
= src
->aspect
;
1466 region
.srcSubresource
.mipLevel
= src_level
;
1467 region
.srcSubresource
.layerCount
= 1;
1468 if (src
->base
.array_size
> 1) {
1469 region
.srcSubresource
.baseArrayLayer
= src_box
->z
;
1470 region
.srcSubresource
.layerCount
= src_box
->depth
;
1471 region
.extent
.depth
= 1;
1473 region
.srcOffset
.z
= src_box
->z
;
1474 region
.srcSubresource
.layerCount
= 1;
1475 region
.extent
.depth
= src_box
->depth
;
1478 region
.srcOffset
.x
= src_box
->x
;
1479 region
.srcOffset
.y
= src_box
->y
;
1481 region
.dstSubresource
.aspectMask
= dst
->aspect
;
1482 region
.dstSubresource
.mipLevel
= dst_level
;
1483 if (dst
->base
.array_size
> 1) {
1484 region
.dstSubresource
.baseArrayLayer
= dstz
;
1485 region
.dstSubresource
.layerCount
= src_box
->depth
;
1487 region
.dstOffset
.z
= dstz
;
1488 region
.dstSubresource
.layerCount
= 1;
1491 region
.dstOffset
.x
= dstx
;
1492 region
.dstOffset
.y
= dsty
;
1493 region
.extent
.width
= src_box
->width
;
1494 region
.extent
.height
= src_box
->height
;
1496 struct zink_batch
*batch
= zink_batch_no_rp(ctx
);
1497 zink_batch_reference_resoure(batch
, src
);
1498 zink_batch_reference_resoure(batch
, dst
);
1500 if (src
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
) {
1501 zink_resource_barrier(batch
->cmdbuf
, src
, src
->aspect
,
1502 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
);
1505 if (dst
->layout
!= VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
) {
1506 zink_resource_barrier(batch
->cmdbuf
, dst
, dst
->aspect
,
1507 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
);
1510 vkCmdCopyImage(batch
->cmdbuf
, src
->image
, src
->layout
,
1511 dst
->image
, dst
->layout
,
1514 debug_printf("zink: TODO resource copy\n");
1517 struct pipe_context
*
1518 zink_context_create(struct pipe_screen
*pscreen
, void *priv
, unsigned flags
)
1520 struct zink_screen
*screen
= zink_screen(pscreen
);
1521 struct zink_context
*ctx
= CALLOC_STRUCT(zink_context
);
1523 ctx
->base
.screen
= pscreen
;
1524 ctx
->base
.priv
= priv
;
1526 ctx
->base
.destroy
= zink_context_destroy
;
1528 zink_context_state_init(&ctx
->base
);
1530 ctx
->base
.create_sampler_state
= zink_create_sampler_state
;
1531 ctx
->base
.bind_sampler_states
= zink_bind_sampler_states
;
1532 ctx
->base
.delete_sampler_state
= zink_delete_sampler_state
;
1534 ctx
->base
.create_sampler_view
= zink_create_sampler_view
;
1535 ctx
->base
.set_sampler_views
= zink_set_sampler_views
;
1536 ctx
->base
.sampler_view_destroy
= zink_sampler_view_destroy
;
1538 ctx
->base
.create_vs_state
= zink_create_vs_state
;
1539 ctx
->base
.bind_vs_state
= zink_bind_vs_state
;
1540 ctx
->base
.delete_vs_state
= zink_delete_vs_state
;
1542 ctx
->base
.create_fs_state
= zink_create_fs_state
;
1543 ctx
->base
.bind_fs_state
= zink_bind_fs_state
;
1544 ctx
->base
.delete_fs_state
= zink_delete_fs_state
;
1546 ctx
->base
.set_polygon_stipple
= zink_set_polygon_stipple
;
1547 ctx
->base
.set_vertex_buffers
= zink_set_vertex_buffers
;
1548 ctx
->base
.set_viewport_states
= zink_set_viewport_states
;
1549 ctx
->base
.set_scissor_states
= zink_set_scissor_states
;
1550 ctx
->base
.set_constant_buffer
= zink_set_constant_buffer
;
1551 ctx
->base
.set_framebuffer_state
= zink_set_framebuffer_state
;
1552 ctx
->base
.set_stencil_ref
= zink_set_stencil_ref
;
1553 ctx
->base
.set_clip_state
= zink_set_clip_state
;
1554 ctx
->base
.set_blend_color
= zink_set_blend_color
;
1556 ctx
->base
.set_sample_mask
= zink_set_sample_mask
;
1558 ctx
->base
.clear
= zink_clear
;
1559 ctx
->base
.draw_vbo
= zink_draw_vbo
;
1560 ctx
->base
.flush
= zink_flush
;
1562 ctx
->base
.resource_copy_region
= zink_resource_copy_region
;
1563 ctx
->base
.blit
= zink_blit
;
1565 ctx
->base
.flush_resource
= zink_flush_resource
;
1566 zink_context_surface_init(&ctx
->base
);
1567 zink_context_resource_init(&ctx
->base
);
1568 zink_context_query_init(&ctx
->base
);
1570 slab_create_child(&ctx
->transfer_pool
, &screen
->transfer_pool
);
1572 ctx
->base
.stream_uploader
= u_upload_create_default(&ctx
->base
);
1573 ctx
->base
.const_uploader
= ctx
->base
.stream_uploader
;
1575 int prim_hwsupport
= 1 << PIPE_PRIM_POINTS
|
1576 1 << PIPE_PRIM_LINES
|
1577 1 << PIPE_PRIM_LINE_STRIP
|
1578 1 << PIPE_PRIM_TRIANGLES
|
1579 1 << PIPE_PRIM_TRIANGLE_STRIP
|
1580 1 << PIPE_PRIM_TRIANGLE_FAN
;
1582 ctx
->primconvert
= util_primconvert_create(&ctx
->base
, prim_hwsupport
);
1583 if (!ctx
->primconvert
)
1586 ctx
->blitter
= util_blitter_create(&ctx
->base
);
1590 VkCommandPoolCreateInfo cpci
= {};
1591 cpci
.sType
= VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO
;
1592 cpci
.queueFamilyIndex
= screen
->gfx_queue
;
1593 cpci
.flags
= VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
;
1594 if (vkCreateCommandPool(screen
->dev
, &cpci
, NULL
, &ctx
->cmdpool
) != VK_SUCCESS
)
1597 VkCommandBufferAllocateInfo cbai
= {};
1598 cbai
.sType
= VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
;
1599 cbai
.commandPool
= ctx
->cmdpool
;
1600 cbai
.level
= VK_COMMAND_BUFFER_LEVEL_PRIMARY
;
1601 cbai
.commandBufferCount
= 1;
1603 VkDescriptorPoolSize sizes
[] = {
1604 {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
, ZINK_BATCH_DESC_SIZE
}
1606 VkDescriptorPoolCreateInfo dpci
= {};
1607 dpci
.sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
;
1608 dpci
.pPoolSizes
= sizes
;
1609 dpci
.poolSizeCount
= ARRAY_SIZE(sizes
);
1610 dpci
.flags
= VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
;
1611 dpci
.maxSets
= ZINK_BATCH_DESC_SIZE
;
1613 for (int i
= 0; i
< ARRAY_SIZE(ctx
->batches
); ++i
) {
1614 if (vkAllocateCommandBuffers(screen
->dev
, &cbai
, &ctx
->batches
[i
].cmdbuf
) != VK_SUCCESS
)
1617 ctx
->batches
[i
].resources
= _mesa_set_create(NULL
, _mesa_hash_pointer
,
1618 _mesa_key_pointer_equal
);
1619 ctx
->batches
[i
].sampler_views
= _mesa_set_create(NULL
,
1621 _mesa_key_pointer_equal
);
1623 if (!ctx
->batches
[i
].resources
|| !ctx
->batches
[i
].sampler_views
)
1626 util_dynarray_init(&ctx
->batches
[i
].zombie_samplers
, NULL
);
1628 if (vkCreateDescriptorPool(screen
->dev
, &dpci
, 0,
1629 &ctx
->batches
[i
].descpool
) != VK_SUCCESS
)
1633 vkGetDeviceQueue(screen
->dev
, screen
->gfx_queue
, 0, &ctx
->queue
);
1635 ctx
->program_cache
= _mesa_hash_table_create(NULL
,
1637 equals_gfx_program
);
1638 ctx
->render_pass_cache
= _mesa_hash_table_create(NULL
,
1639 hash_render_pass_state
,
1640 equals_render_pass_state
);
1641 ctx
->framebuffer_cache
= _mesa_hash_table_create(NULL
,
1642 hash_framebuffer_state
,
1643 equals_framebuffer_state
);
1645 if (!ctx
->program_cache
|| !ctx
->render_pass_cache
||
1646 !ctx
->framebuffer_cache
)
1649 ctx
->dirty_program
= true;
1651 /* start the first batch */
1652 zink_start_batch(ctx
, zink_curr_batch(ctx
));
1658 vkDestroyCommandPool(screen
->dev
, ctx
->cmdpool
, NULL
);