zink: delete samplers after the current cmdbuf
[mesa.git] / src / gallium / drivers / zink / zink_context.c
1 /*
2 * Copyright 2018 Collabora Ltd.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "zink_context.h"
25
26 #include "zink_cmdbuf.h"
27 #include "zink_compiler.h"
28 #include "zink_fence.h"
29 #include "zink_framebuffer.h"
30 #include "zink_pipeline.h"
31 #include "zink_program.h"
32 #include "zink_render_pass.h"
33 #include "zink_resource.h"
34 #include "zink_screen.h"
35 #include "zink_state.h"
36 #include "zink_surface.h"
37
38 #include "indices/u_primconvert.h"
39 #include "util/u_blitter.h"
40 #include "util/u_debug.h"
41 #include "util/u_format.h"
42 #include "util/u_framebuffer.h"
43 #include "util/u_helpers.h"
44 #include "util/u_inlines.h"
45
46 #include "nir.h"
47
48 #include "util/u_memory.h"
49 #include "util/u_prim.h"
50 #include "util/u_upload_mgr.h"
51
static void
zink_context_destroy(struct pipe_context *pctx)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);

   /* Drain the queue first; otherwise in-flight command buffers could
    * still reference the objects destroyed below. */
   if (vkQueueWaitIdle(ctx->queue) != VK_SUCCESS)
      debug_printf("vkQueueWaitIdle failed\n");

   /* Command buffers must be freed before their pool goes away. */
   for (int i = 0; i < ARRAY_SIZE(ctx->cmdbufs); ++i)
      vkFreeCommandBuffers(screen->dev, ctx->cmdpool, 1, &ctx->cmdbufs[i].cmdbuf);
   vkDestroyCommandPool(screen->dev, ctx->cmdpool, NULL);

   util_primconvert_destroy(ctx->primconvert);
   u_upload_destroy(pctx->stream_uploader);
   slab_destroy_child(&ctx->transfer_pool);
   util_blitter_destroy(ctx->blitter);
   FREE(ctx);
}
71
72 static VkFilter
73 filter(enum pipe_tex_filter filter)
74 {
75 switch (filter) {
76 case PIPE_TEX_FILTER_NEAREST: return VK_FILTER_NEAREST;
77 case PIPE_TEX_FILTER_LINEAR: return VK_FILTER_LINEAR;
78 }
79 unreachable("unexpected filter");
80 }
81
82 static VkSamplerMipmapMode
83 sampler_mipmap_mode(enum pipe_tex_mipfilter filter)
84 {
85 switch (filter) {
86 case PIPE_TEX_MIPFILTER_NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
87 case PIPE_TEX_MIPFILTER_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
88 case PIPE_TEX_MIPFILTER_NONE:
89 unreachable("PIPE_TEX_MIPFILTER_NONE should be dealt with earlier");
90 }
91 unreachable("unexpected filter");
92 }
93
94 static VkSamplerAddressMode
95 sampler_address_mode(enum pipe_tex_wrap filter)
96 {
97 switch (filter) {
98 case PIPE_TEX_WRAP_REPEAT: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
99 case PIPE_TEX_WRAP_CLAMP: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
100 case PIPE_TEX_WRAP_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
101 case PIPE_TEX_WRAP_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
102 case PIPE_TEX_WRAP_MIRROR_REPEAT: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
103 case PIPE_TEX_WRAP_MIRROR_CLAMP: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
104 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
105 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
106 }
107 unreachable("unexpected wrap");
108 }
109
110 static void *
111 zink_create_sampler_state(struct pipe_context *pctx,
112 const struct pipe_sampler_state *state)
113 {
114 struct zink_screen *screen = zink_screen(pctx->screen);
115
116 VkSamplerCreateInfo sci = {};
117 sci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
118 sci.magFilter = filter(state->mag_img_filter);
119 sci.minFilter = filter(state->min_img_filter);
120
121 if (state->min_mip_filter != PIPE_TEX_MIPFILTER_NONE) {
122 sci.mipmapMode = sampler_mipmap_mode(state->min_mip_filter);
123 sci.minLod = state->min_lod;
124 sci.maxLod = state->max_lod;
125 } else {
126 sci.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
127 sci.minLod = 0;
128 sci.maxLod = 0;
129 }
130
131 sci.addressModeU = sampler_address_mode(state->wrap_s);
132 sci.addressModeV = sampler_address_mode(state->wrap_t);
133 sci.addressModeW = sampler_address_mode(state->wrap_r);
134 sci.mipLodBias = state->lod_bias;
135 sci.compareOp = VK_COMPARE_OP_NEVER; // TODO
136 sci.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // TODO
137
138 if (state->max_anisotropy > 1) {
139 sci.maxAnisotropy = state->max_anisotropy;
140 sci.anisotropyEnable = VK_TRUE;
141 }
142
143 VkSampler sampler;
144 VkResult err = vkCreateSampler(screen->dev, &sci, NULL, &sampler);
145 if (err != VK_SUCCESS)
146 return NULL;
147
148 return sampler;
149 }
150
151 static void
152 zink_bind_sampler_states(struct pipe_context *pctx,
153 enum pipe_shader_type shader,
154 unsigned start_slot,
155 unsigned num_samplers,
156 void **samplers)
157 {
158 struct zink_context *ctx = zink_context(pctx);
159 for (unsigned i = 0; i < num_samplers; ++i)
160 ctx->samplers[shader][start_slot + i] = (VkSampler)samplers[i];
161 }
162
163 static void
164 zink_delete_sampler_state(struct pipe_context *pctx,
165 void *sampler_state)
166 {
167 struct zink_cmdbuf *cmdbuf = zink_context_curr_cmdbuf(zink_context(pctx));
168 util_dynarray_append(&cmdbuf->zombie_samplers,
169 VkSampler, sampler_state);
170 }
171
172
173 static VkImageViewType
174 image_view_type(enum pipe_texture_target target)
175 {
176 switch (target) {
177 case PIPE_TEXTURE_1D: return VK_IMAGE_VIEW_TYPE_1D;
178 case PIPE_TEXTURE_1D_ARRAY: return VK_IMAGE_VIEW_TYPE_1D_ARRAY;
179 case PIPE_TEXTURE_2D: return VK_IMAGE_VIEW_TYPE_2D;
180 case PIPE_TEXTURE_2D_ARRAY: return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
181 case PIPE_TEXTURE_CUBE: return VK_IMAGE_VIEW_TYPE_CUBE;
182 case PIPE_TEXTURE_CUBE_ARRAY: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
183 case PIPE_TEXTURE_3D: return VK_IMAGE_VIEW_TYPE_3D;
184 case PIPE_TEXTURE_RECT: return VK_IMAGE_VIEW_TYPE_2D; /* not sure */
185 default:
186 unreachable("unexpected target");
187 }
188 }
189
190 static VkComponentSwizzle
191 component_mapping(enum pipe_swizzle swizzle)
192 {
193 switch (swizzle) {
194 case PIPE_SWIZZLE_X: return VK_COMPONENT_SWIZZLE_R;
195 case PIPE_SWIZZLE_Y: return VK_COMPONENT_SWIZZLE_G;
196 case PIPE_SWIZZLE_Z: return VK_COMPONENT_SWIZZLE_B;
197 case PIPE_SWIZZLE_W: return VK_COMPONENT_SWIZZLE_A;
198 case PIPE_SWIZZLE_0: return VK_COMPONENT_SWIZZLE_ZERO;
199 case PIPE_SWIZZLE_1: return VK_COMPONENT_SWIZZLE_ONE;
200 case PIPE_SWIZZLE_NONE: return VK_COMPONENT_SWIZZLE_IDENTITY; // ???
201 default:
202 unreachable("unexpected swizzle");
203 }
204 }
205
206 static struct pipe_sampler_view *
207 zink_create_sampler_view(struct pipe_context *pctx, struct pipe_resource *pres,
208 const struct pipe_sampler_view *state)
209 {
210 struct zink_screen *screen = zink_screen(pctx->screen);
211 struct zink_resource *res = zink_resource(pres);
212 struct zink_sampler_view *sampler_view = CALLOC_STRUCT(zink_sampler_view);
213
214 sampler_view->base = *state;
215 sampler_view->base.texture = NULL;
216 pipe_resource_reference(&sampler_view->base.texture, pres);
217 sampler_view->base.reference.count = 1;
218 sampler_view->base.context = pctx;
219
220 VkImageViewCreateInfo ivci = {};
221 ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
222 ivci.image = res->image;
223 ivci.viewType = image_view_type(state->target);
224 ivci.format = zink_get_format(state->format);
225 ivci.components.r = component_mapping(state->swizzle_r);
226 ivci.components.g = component_mapping(state->swizzle_g);
227 ivci.components.b = component_mapping(state->swizzle_b);
228 ivci.components.a = component_mapping(state->swizzle_a);
229 ivci.subresourceRange.aspectMask = zink_aspect_from_format(state->format);
230 ivci.subresourceRange.baseMipLevel = state->u.tex.first_level;
231 ivci.subresourceRange.baseArrayLayer = state->u.tex.first_layer;
232 ivci.subresourceRange.levelCount = state->u.tex.last_level - state->u.tex.first_level + 1;
233 ivci.subresourceRange.layerCount = state->u.tex.last_layer - state->u.tex.first_layer + 1;
234
235 VkResult err = vkCreateImageView(screen->dev, &ivci, NULL, &sampler_view->image_view);
236 if (err != VK_SUCCESS) {
237 FREE(sampler_view);
238 return NULL;
239 }
240
241 return &sampler_view->base;
242 }
243
244 static void
245 zink_destroy_sampler_view(struct pipe_context *pctx,
246 struct pipe_sampler_view *pview)
247 {
248 struct zink_sampler_view *view = zink_sampler_view(pview);
249 vkDestroyImageView(zink_screen(pctx->screen)->dev, view->image_view, NULL);
250 FREE(view);
251 }
252
253 static void *
254 zink_create_vs_state(struct pipe_context *pctx,
255 const struct pipe_shader_state *shader)
256 {
257 struct nir_shader *nir;
258 if (shader->type != PIPE_SHADER_IR_NIR)
259 nir = zink_tgsi_to_nir(pctx->screen, shader->tokens);
260 else
261 nir = (struct nir_shader *)shader->ir.nir;
262
263 return zink_compile_nir(zink_screen(pctx->screen), nir);
264 }
265
/* Store a shader into the context's graphics-stage array and mark the
 * program dirty so it is (re)looked-up at draw time. */
static void
bind_stage(struct zink_context *ctx, enum pipe_shader_type stage,
           struct zink_shader *shader)
{
   /* only graphics stages; compute has no slot in gfx_stages */
   assert(stage < PIPE_SHADER_COMPUTE);
   ctx->gfx_stages[stage] = shader;
   ctx->dirty |= ZINK_DIRTY_PROGRAM;
}
274
/* Bind a vertex-shader CSO created by zink_create_vs_state. */
static void
zink_bind_vs_state(struct pipe_context *pctx,
                   void *cso)
{
   bind_stage(zink_context(pctx), PIPE_SHADER_VERTEX, cso);
}
281
/* Free a vertex-shader CSO via zink_shader_free. */
static void
zink_delete_vs_state(struct pipe_context *pctx,
                     void *cso)
{
   zink_shader_free(zink_screen(pctx->screen), cso);
}
288
289 static void *
290 zink_create_fs_state(struct pipe_context *pctx,
291 const struct pipe_shader_state *shader)
292 {
293 struct nir_shader *nir;
294 if (shader->type != PIPE_SHADER_IR_NIR)
295 nir = zink_tgsi_to_nir(pctx->screen, shader->tokens);
296 else
297 nir = (struct nir_shader *)shader->ir.nir;
298
299 return zink_compile_nir(zink_screen(pctx->screen), nir);
300 }
301
/* Bind a fragment-shader CSO created by zink_create_fs_state. */
static void
zink_bind_fs_state(struct pipe_context *pctx,
                   void *cso)
{
   bind_stage(zink_context(pctx), PIPE_SHADER_FRAGMENT, cso);
}
308
/* Free a fragment-shader CSO via zink_shader_free. */
static void
zink_delete_fs_state(struct pipe_context *pctx,
                     void *cso)
{
   zink_shader_free(zink_screen(pctx->screen), cso);
}
315
/* No-op: polygon stipple is not implemented in this driver. */
static void
zink_set_polygon_stipple(struct pipe_context *pctx,
                         const struct pipe_poly_stipple *ps)
{
}
321
322 static void
323 zink_set_vertex_buffers(struct pipe_context *pctx,
324 unsigned start_slot,
325 unsigned num_buffers,
326 const struct pipe_vertex_buffer *buffers)
327 {
328 struct zink_context *ctx = zink_context(pctx);
329
330 if (buffers) {
331 for (int i = 0; i < num_buffers; ++i) {
332 const struct pipe_vertex_buffer *vb = buffers + i;
333 ctx->gfx_pipeline_state.bindings[start_slot + i].stride = vb->stride;
334 }
335 }
336
337 util_set_vertex_buffers_mask(ctx->buffers, &ctx->buffers_enabled_mask,
338 buffers, start_slot, num_buffers);
339 }
340
341 static void
342 zink_set_viewport_states(struct pipe_context *pctx,
343 unsigned start_slot,
344 unsigned num_viewports,
345 const struct pipe_viewport_state *state)
346 {
347 struct zink_context *ctx = zink_context(pctx);
348
349 for (unsigned i = 0; i < num_viewports; ++i) {
350 VkViewport viewport = {
351 state[i].translate[0] - state[i].scale[0],
352 state[i].translate[1] - state[i].scale[1],
353 state[i].scale[0] * 2,
354 state[i].scale[1] * 2,
355 state[i].translate[2] - state[i].scale[2],
356 state[i].translate[2] + state[i].scale[2]
357 };
358 ctx->viewports[start_slot + i] = viewport;
359 }
360 ctx->num_viewports = start_slot + num_viewports;
361 }
362
363 static void
364 zink_set_scissor_states(struct pipe_context *pctx,
365 unsigned start_slot, unsigned num_scissors,
366 const struct pipe_scissor_state *states)
367 {
368 struct zink_context *ctx = zink_context(pctx);
369
370 for (unsigned i = 0; i < num_scissors; i++) {
371 VkRect2D scissor;
372
373 scissor.offset.x = states[i].minx;
374 scissor.offset.y = states[i].miny;
375 scissor.extent.width = states[i].maxx - states[i].minx;
376 scissor.extent.height = states[i].maxy - states[i].miny;
377 ctx->scissors[start_slot + i] = scissor;
378 }
379 ctx->num_scissors = start_slot + num_scissors;
380 }
381
/* Bind (or unbind, when cb is NULL) a constant buffer for one shader
 * stage/slot.  User-memory constants are first uploaded into a real
 * GPU buffer via the context's const_uploader. */
static void
zink_set_constant_buffer(struct pipe_context *pctx,
                         enum pipe_shader_type shader, uint index,
                         const struct pipe_constant_buffer *cb)
{
   struct zink_context *ctx = zink_context(pctx);

   if (cb) {
      struct pipe_resource *buffer = cb->buffer;
      unsigned offset = cb->buffer_offset;
      /* u_upload_data hands back a referenced resource + offset */
      if (cb->user_buffer)
         u_upload_data(ctx->base.const_uploader, 0, cb->buffer_size, 64,
                       cb->user_buffer, &offset, &buffer);

      pipe_resource_reference(&ctx->ubos[shader][index].buffer, buffer);
      ctx->ubos[shader][index].buffer_offset = offset;
      ctx->ubos[shader][index].buffer_size = cb->buffer_size;
      ctx->ubos[shader][index].user_buffer = NULL;

      /* drop the upload's extra reference; the slot now holds one */
      if (cb->user_buffer)
         pipe_resource_reference(&buffer, NULL);
   } else {
      /* unbind the slot */
      pipe_resource_reference(&ctx->ubos[shader][index].buffer, NULL);
      ctx->ubos[shader][index].buffer_offset = 0;
      ctx->ubos[shader][index].buffer_size = 0;
      ctx->ubos[shader][index].user_buffer = NULL;
   }
}
410
411 static void
412 zink_set_sampler_views(struct pipe_context *pctx,
413 enum pipe_shader_type shader_type,
414 unsigned start_slot,
415 unsigned num_views,
416 struct pipe_sampler_view **views)
417 {
418 struct zink_context *ctx = zink_context(pctx);
419 assert(views);
420 for (unsigned i = 0; i < num_views; ++i) {
421 pipe_sampler_view_reference(
422 &ctx->image_views[shader_type][start_slot + i],
423 views[i]);
424 }
425 }
426
427 static void
428 zink_set_stencil_ref(struct pipe_context *pctx,
429 const struct pipe_stencil_ref *ref)
430 {
431 struct zink_context *ctx = zink_context(pctx);
432 ctx->stencil_ref[0] = ref->ref_value[0];
433 ctx->stencil_ref[1] = ref->ref_value[1];
434 }
435
/* No-op: user clip planes are not implemented in this driver. */
static void
zink_set_clip_state(struct pipe_context *pctx,
                    const struct pipe_clip_state *pcs)
{
}
441
442 static struct zink_render_pass *
443 get_render_pass(struct zink_context *ctx,
444 const struct pipe_framebuffer_state *fb)
445 {
446 struct zink_render_pass_state state;
447
448 for (int i = 0; i < fb->nr_cbufs; i++) {
449 struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);
450 state.rts[i].format = cbuf->format;
451 }
452 state.num_cbufs = fb->nr_cbufs;
453
454 if (fb->zsbuf) {
455 struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
456 state.rts[fb->nr_cbufs].format = zsbuf->format;
457 }
458 state.have_zsbuf = fb->zsbuf != NULL;
459
460 // TODO: cache instead!
461 return zink_create_render_pass(zink_screen(ctx->base.screen), &state);
462 }
463
/* Create a zink_framebuffer for the given gallium framebuffer state
 * and render pass.  Currently creates a fresh object on every call;
 * see the TODO about caching. */
static struct zink_framebuffer *
get_framebuffer(struct zink_context *ctx,
                const struct pipe_framebuffer_state *fb,
                struct zink_render_pass *rp)
{
   // TODO: cache!
   return zink_create_framebuffer(zink_screen(ctx->base.screen), fb, rp);
}
472
/* Switch to a new framebuffer: acquire a matching render pass and
 * framebuffer object, move the context's references over to them, and
 * transition all attachments into a renderable layout. */
static void
zink_set_framebuffer_state(struct pipe_context *pctx,
                           const struct pipe_framebuffer_state *state)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);

   struct zink_render_pass *rp = get_render_pass(ctx, state);
   zink_render_pass_reference(screen, &ctx->gfx_pipeline_state.render_pass, rp);

   struct zink_framebuffer *fb = get_framebuffer(ctx, state, rp);
   zink_framebuffer_reference(screen, &ctx->framebuffer, fb);
   /* drop the creation references; the context slots now own them */
   zink_framebuffer_reference(screen, &fb, NULL);
   zink_render_pass_reference(screen, &rp, NULL);

   ctx->gfx_pipeline_state.num_attachments = state->nr_cbufs;

   util_copy_framebuffer_state(&ctx->fb_state, state);

   /* record layout transitions for the new attachments */
   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   /* NOTE(review): state->cbufs[i] is assumed non-NULL here — confirm
    * callers never bind sparse attachment lists */
   for (int i = 0; i < state->nr_cbufs; i++) {
      struct zink_resource *res = zink_resource(state->cbufs[i]->texture);
      if (res->layout != VK_IMAGE_LAYOUT_GENERAL &&
          res->layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   if (state->zsbuf) {
      struct zink_resource *res = zink_resource(state->zsbuf->texture);
      if (res->layout != VK_IMAGE_LAYOUT_GENERAL &&
          res->layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   zink_end_cmdbuf(ctx, cmdbuf);
}
514
/* No-op: query pausing/resuming is not implemented in this driver. */
static void
zink_set_active_query_state(struct pipe_context *pctx, bool enable)
{
}
519
520 static void
521 zink_set_blend_color(struct pipe_context *pctx,
522 const struct pipe_blend_color *color)
523 {
524 struct zink_context *ctx = zink_context(pctx);
525 memcpy(ctx->blend_constants, color->color, sizeof(float) * 4);
526 }
527
528 static VkAccessFlags
529 access_flags(VkImageLayout layout)
530 {
531 switch (layout) {
532 case VK_IMAGE_LAYOUT_UNDEFINED:
533 case VK_IMAGE_LAYOUT_GENERAL:
534 return 0;
535
536 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
537 return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
538 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
539 return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
540
541 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
542 return VK_ACCESS_SHADER_READ_BIT;
543
544 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
545 return VK_ACCESS_TRANSFER_READ_BIT;
546
547 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
548 return VK_ACCESS_TRANSFER_WRITE_BIT;
549
550 case VK_IMAGE_LAYOUT_PREINITIALIZED:
551 return VK_ACCESS_HOST_WRITE_BIT;
552
553 default:
554 unreachable("unexpected layout");
555 }
556 }
557
/* Record a whole-image layout-transition barrier for the given aspect
 * (all mip levels, all array layers) and update the resource's tracked
 * layout. */
void
zink_resource_barrier(VkCommandBuffer cmdbuf, struct zink_resource *res,
                      VkImageAspectFlags aspect, VkImageLayout new_layout)
{
   VkImageSubresourceRange isr = {
      aspect,
      0, VK_REMAINING_MIP_LEVELS,
      0, VK_REMAINING_ARRAY_LAYERS
   };

   VkImageMemoryBarrier imb = {
      VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
      NULL,
      access_flags(res->layout),   /* srcAccessMask */
      access_flags(new_layout),    /* dstAccessMask */
      res->layout,                 /* oldLayout */
      new_layout,
      VK_QUEUE_FAMILY_IGNORED,     /* no queue-family ownership transfer */
      VK_QUEUE_FAMILY_IGNORED,
      res->image,
      isr
   };
   /* NOTE(review): TOP_OF_PIPE -> BOTTOM_OF_PIPE provides essentially
    * no execution ordering with surrounding work — confirm this stage
    * pairing is intentional */
   vkCmdPipelineBarrier(
      cmdbuf,
      VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
      VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
      0,
      0, NULL,
      0, NULL,
      1, &imb
   );

   res->layout = new_layout;
}
592
593 static void
594 zink_clear(struct pipe_context *pctx,
595 unsigned buffers,
596 const union pipe_color_union *pcolor,
597 double depth, unsigned stencil)
598 {
599 struct zink_context *ctx = zink_context(pctx);
600 struct pipe_framebuffer_state *fb = &ctx->fb_state;
601
602 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
603 if (!cmdbuf)
604 return;
605
606 // first transition all images to a compatible layout
607 if (buffers & PIPE_CLEAR_COLOR) {
608 for (unsigned i = 0; i < fb->nr_cbufs; i++) {
609 if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
610 continue;
611
612 struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);
613
614 if (cbuf->layout != VK_IMAGE_LAYOUT_GENERAL &&
615 cbuf->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
616 zink_resource_barrier(cmdbuf->cmdbuf, cbuf, cbuf->aspect,
617 VK_IMAGE_LAYOUT_GENERAL);
618 }
619 }
620
621 VkImageAspectFlags depthStencilAspect = 0;
622 if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
623 struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
624 if (buffers & PIPE_CLEAR_DEPTH)
625 depthStencilAspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
626 if (buffers & PIPE_CLEAR_STENCIL)
627 depthStencilAspect |= VK_IMAGE_ASPECT_STENCIL_BIT;
628
629 if (zsbuf->layout != VK_IMAGE_LAYOUT_GENERAL &&
630 zsbuf->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
631 zink_resource_barrier(cmdbuf->cmdbuf, zsbuf, depthStencilAspect,
632 VK_IMAGE_LAYOUT_GENERAL);
633 }
634
635 VkClearColorValue color;
636 color.float32[0] = pcolor->f[0];
637 color.float32[1] = pcolor->f[1];
638 color.float32[2] = pcolor->f[2];
639 color.float32[3] = pcolor->f[3];
640
641 if (buffers & PIPE_CLEAR_COLOR) {
642 for (unsigned i = 0; i < fb->nr_cbufs; i++) {
643 if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
644 continue;
645
646 struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);
647
648 VkImageSubresourceRange range;
649 range.aspectMask = cbuf->aspect;
650 range.baseMipLevel = 0;
651 range.levelCount = VK_REMAINING_MIP_LEVELS;
652 range.baseArrayLayer = 0;
653 range.layerCount = VK_REMAINING_ARRAY_LAYERS;
654 vkCmdClearColorImage(cmdbuf->cmdbuf,
655 cbuf->image, VK_IMAGE_LAYOUT_GENERAL,
656 &color,
657 1, &range);
658 }
659 }
660
661 if (depthStencilAspect) {
662 struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
663
664 VkClearDepthStencilValue zsvalue = { depth, stencil };
665
666 VkImageSubresourceRange range;
667 range.aspectMask = depthStencilAspect;
668 range.baseMipLevel = 0;
669 range.levelCount = VK_REMAINING_MIP_LEVELS;
670 range.baseArrayLayer = 0;
671 range.layerCount = VK_REMAINING_ARRAY_LAYERS;
672
673 vkCmdClearDepthStencilImage(cmdbuf->cmdbuf,
674 zsbuf->image, VK_IMAGE_LAYOUT_GENERAL,
675 &zsvalue,
676 1, &range);
677 }
678
679 zink_end_cmdbuf(ctx, cmdbuf);
680 }
681
682 VkShaderStageFlagBits
683 zink_shader_stage(enum pipe_shader_type type)
684 {
685 VkShaderStageFlagBits stages[] = {
686 [PIPE_SHADER_VERTEX] = VK_SHADER_STAGE_VERTEX_BIT,
687 [PIPE_SHADER_FRAGMENT] = VK_SHADER_STAGE_FRAGMENT_BIT,
688 [PIPE_SHADER_GEOMETRY] = VK_SHADER_STAGE_GEOMETRY_BIT,
689 [PIPE_SHADER_TESS_CTRL] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
690 [PIPE_SHADER_TESS_EVAL] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
691 [PIPE_SHADER_COMPUTE] = VK_SHADER_STAGE_COMPUTE_BIT,
692 };
693 return stages[type];
694 }
695
696 static VkDescriptorSet
697 allocate_descriptor_set(struct zink_context *ctx, VkDescriptorSetLayout dsl)
698 {
699 struct zink_screen *screen = zink_screen(ctx->base.screen);
700 VkDescriptorSetAllocateInfo dsai;
701 memset((void *)&dsai, 0, sizeof(dsai));
702 dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
703 dsai.pNext = NULL;
704 dsai.descriptorPool = ctx->descpool;
705 dsai.descriptorSetCount = 1;
706 dsai.pSetLayouts = &dsl;
707
708 VkDescriptorSet desc_set;
709 if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
710 if (vkResetDescriptorPool(screen->dev, ctx->descpool, 0) != VK_SUCCESS) {
711 fprintf(stderr, "vkResetDescriptorPool failed\n");
712 return VK_NULL_HANDLE;
713 }
714 if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
715 fprintf(stderr, "vkAllocateDescriptorSets failed\n");
716 return VK_NULL_HANDLE;
717 }
718 }
719
720 return desc_set;
721 }
722
723 static VkPrimitiveTopology
724 zink_primitive_topology(enum pipe_prim_type mode)
725 {
726 switch (mode) {
727 case PIPE_PRIM_POINTS:
728 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
729
730 case PIPE_PRIM_LINES:
731 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
732
733 case PIPE_PRIM_LINE_STRIP:
734 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
735
736 case PIPE_PRIM_TRIANGLES:
737 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
738
739 case PIPE_PRIM_TRIANGLE_STRIP:
740 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
741
742 case PIPE_PRIM_TRIANGLE_FAN:
743 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
744
745 default:
746 unreachable("unexpected enum pipe_prim_type");
747 }
748 }
749
/* Emit vkCmdBindVertexBuffers for every binding used by the current
 * vertex-elements state, translating through elems->binding_map. */
static void
zink_bind_vertex_buffers(struct zink_cmdbuf *cmdbuf, struct zink_context *ctx)
{
   VkBuffer buffers[PIPE_MAX_ATTRIBS];
   VkDeviceSize buffer_offsets[PIPE_MAX_ATTRIBS];
   struct zink_vertex_elements_state *elems = ctx->gfx_pipeline_state.element_state;
   for (unsigned i = 0; i < elems->num_bindings; i++) {
      struct pipe_vertex_buffer *vb = ctx->buffers + elems->binding_map[i];
      assert(vb && vb->buffer.resource);
      struct zink_resource *res = zink_resource(vb->buffer.resource);
      buffers[i] = res->buffer;
      buffer_offsets[i] = vb->buffer_offset;
      /* keep the buffer alive until this command buffer retires */
      zink_cmdbuf_reference_resoure(cmdbuf, res);
   }

   if (elems->num_bindings > 0)
      vkCmdBindVertexBuffers(cmdbuf->cmdbuf, 0, elems->num_bindings, buffers, buffer_offsets);
}
768
769 static void
770 begin_render_pass(struct zink_screen *screen, struct zink_cmdbuf *cmdbuf,
771 struct zink_render_pass *rp, struct zink_framebuffer *fb,
772 unsigned width, unsigned height)
773 {
774 VkRenderPassBeginInfo rpbi = {};
775 rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
776 rpbi.renderPass = rp->render_pass;
777 rpbi.renderArea.offset.x = 0;
778 rpbi.renderArea.offset.y = 0;
779 rpbi.renderArea.extent.width = width;
780 rpbi.renderArea.extent.height = height;
781 rpbi.clearValueCount = 0;
782 rpbi.pClearValues = NULL;
783 rpbi.framebuffer = fb->fb;
784
785 assert(rp && fb);
786 assert(!cmdbuf->rp && !cmdbuf->fb);
787 zink_render_pass_reference(screen, &cmdbuf->rp, rp);
788 zink_framebuffer_reference(screen, &cmdbuf->fb, fb);
789
790 vkCmdBeginRenderPass(cmdbuf->cmdbuf, &rpbi, VK_SUBPASS_CONTENTS_INLINE);
791 }
792
/* Hash the array of per-stage shader pointers used as the program
 * cache key.  Only the first (PIPE_SHADER_TYPES - 1) entries are
 * hashed, i.e. all graphics stages with compute excluded. */
static uint32_t
hash_gfx_program(const void *key)
{
   return _mesa_hash_data(key, sizeof(struct zink_shader *) * (PIPE_SHADER_TYPES - 1));
}
798
/* Key-equality callback paired with hash_gfx_program: compares the
 * graphics-stage shader-pointer arrays byte-wise. */
static bool
equals_gfx_program(const void *a, const void *b)
{
   return memcmp(a, b, sizeof(struct zink_shader *) * (PIPE_SHADER_TYPES - 1)) == 0;
}
804
/* Return the graphics program for the currently-bound shader stages,
 * creating and caching it on first use.  The cache key is the array of
 * stage pointers (see hash_gfx_program / equals_gfx_program). */
static struct zink_gfx_program *
get_gfx_program(struct zink_context *ctx)
{
   if (ctx->dirty & ZINK_DIRTY_PROGRAM) {
      struct hash_entry *entry = _mesa_hash_table_search(ctx->program_cache,
                                                         ctx->gfx_stages);
      if (!entry) {
         struct zink_gfx_program *prog;
         prog = zink_create_gfx_program(zink_screen(ctx->base.screen)->dev,
                                        ctx->gfx_stages);
         /* NOTE(review): if this insert fails, prog is leaked — confirm
          * whether a destroy call should be added on this path */
         entry = _mesa_hash_table_insert(ctx->program_cache, prog->stages, prog);
         if (!entry)
            return NULL;
      }
      ctx->curr_program = entry->data;
      ctx->dirty &= ~ZINK_DIRTY_PROGRAM;
   }

   assert(ctx->curr_program);
   return ctx->curr_program;
}
826
/* Record a draw: falls back to u_primconvert for primitives Vulkan
 * cannot draw directly, otherwise binds pipeline, dynamic state,
 * descriptors, vertex/index buffers and emits the draw inside a
 * render pass. */
static void
zink_draw_vbo(struct pipe_context *pctx,
              const struct pipe_draw_info *dinfo)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_rasterizer_state *rast_state = ctx->gfx_pipeline_state.rast_state;

   /* quads and up, plus line loops, have no Vulkan topology; convert */
   if (dinfo->mode >= PIPE_PRIM_QUADS ||
       dinfo->mode == PIPE_PRIM_LINE_LOOP) {
      /* NOTE(review): this casts away const to let u_trim_pipe_prim
       * clamp dinfo->count in place — confirm that is acceptable */
      if (!u_trim_pipe_prim(dinfo->mode, (unsigned *)&dinfo->count))
         return;

      util_primconvert_save_rasterizer_state(ctx->primconvert, &rast_state->base);
      util_primconvert_draw_vbo(ctx->primconvert, dinfo);
      return;
   }

   struct zink_gfx_program *gfx_program = get_gfx_program(ctx);
   if (!gfx_program)
      return;

   ctx->gfx_pipeline_state.primitive_topology = zink_primitive_topology(dinfo->mode);

   VkPipeline pipeline = zink_get_gfx_pipeline(screen->dev, gfx_program,
                                               &ctx->gfx_pipeline_state);

   /* polygon-offset enable depends on the reduced primitive type */
   bool depth_bias = false;
   switch (u_reduced_prim(dinfo->mode)) {
   case PIPE_PRIM_POINTS:
      depth_bias = rast_state->offset_point;
      break;

   case PIPE_PRIM_LINES:
      depth_bias = rast_state->offset_line;
      break;

   case PIPE_PRIM_TRIANGLES:
      depth_bias = rast_state->offset_tri;
      break;

   default:
      unreachable("unexpected reduced prim");
   }

   /* user-memory indices must first be uploaded to a real buffer */
   unsigned index_offset = 0;
   struct pipe_resource *index_buffer = NULL;
   if (dinfo->index_size > 0) {
      if (dinfo->has_user_indices) {
         if (!util_upload_index_buffer(pctx, dinfo, &index_buffer, &index_offset)) {
            debug_printf("util_upload_index_buffer() failed\n");
            return;
         }
      } else
         index_buffer = dinfo->index.resource;
   }

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   begin_render_pass(screen, cmdbuf, ctx->gfx_pipeline_state.render_pass,
                     ctx->framebuffer,
                     ctx->fb_state.width, ctx->fb_state.height);

   /* dynamic state: viewports, scissors, stencil refs, depth bias,
    * blend constants */
   vkCmdSetViewport(cmdbuf->cmdbuf, 0, ctx->num_viewports, ctx->viewports);

   if (ctx->num_scissors)
      vkCmdSetScissor(cmdbuf->cmdbuf, 0, ctx->num_scissors, ctx->scissors);
   else if (ctx->fb_state.width && ctx->fb_state.height) {
      /* no scissors set: cover the whole framebuffer */
      VkRect2D fb_scissor = {};
      fb_scissor.extent.width = ctx->fb_state.width;
      fb_scissor.extent.height = ctx->fb_state.height;
      vkCmdSetScissor(cmdbuf->cmdbuf, 0, 1, &fb_scissor);
   }

   vkCmdSetStencilReference(cmdbuf->cmdbuf, VK_STENCIL_FACE_FRONT_BIT, ctx->stencil_ref[0]);
   vkCmdSetStencilReference(cmdbuf->cmdbuf, VK_STENCIL_FACE_BACK_BIT, ctx->stencil_ref[1]);

   if (depth_bias)
      vkCmdSetDepthBias(cmdbuf->cmdbuf, rast_state->offset_units, rast_state->offset_clamp, rast_state->offset_scale);
   else
      vkCmdSetDepthBias(cmdbuf->cmdbuf, 0.0f, 0.0f, 0.0f);

   if (ctx->gfx_pipeline_state.blend_state->need_blend_constants)
      vkCmdSetBlendConstants(cmdbuf->cmdbuf, ctx->blend_constants);

   /* build and write a descriptor set covering every binding declared
    * by the bound shader stages (UBOs and sampler views) */
   VkDescriptorSet desc_set = allocate_descriptor_set(ctx, gfx_program->dsl);

   VkWriteDescriptorSet wds[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS + PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   VkDescriptorBufferInfo buffer_infos[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS];
   VkDescriptorImageInfo image_infos[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   int num_wds = 0, num_buffer_info = 0, num_image_info = 0;

   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            assert(ctx->ubos[i][index].buffer_size > 0);
            assert(ctx->ubos[i][index].buffer);
            struct zink_resource *res = zink_resource(ctx->ubos[i][index].buffer);
            buffer_infos[num_buffer_info].buffer = res->buffer;
            buffer_infos[num_buffer_info].offset = ctx->ubos[i][index].buffer_offset;
            buffer_infos[num_buffer_info].range = VK_WHOLE_SIZE;
            wds[num_wds].pBufferInfo = buffer_infos + num_buffer_info;
            ++num_buffer_info;
            zink_cmdbuf_reference_resoure(cmdbuf, res);
         } else {
            struct pipe_sampler_view *psampler_view = ctx->image_views[i][index];
            assert(psampler_view);
            struct zink_sampler_view *sampler_view = (struct zink_sampler_view *)psampler_view;
            struct zink_resource *res = zink_resource(psampler_view->texture);
            image_infos[num_image_info].imageLayout = res->layout;
            image_infos[num_image_info].imageView = sampler_view->image_view;
            image_infos[num_image_info].sampler = ctx->samplers[i][index];
            wds[num_wds].pImageInfo = image_infos + num_image_info;
            ++num_image_info;
            zink_cmdbuf_reference_resoure(cmdbuf, res);
         }

         wds[num_wds].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
         wds[num_wds].pNext = NULL;
         wds[num_wds].dstSet = desc_set;
         wds[num_wds].dstBinding = shader->bindings[j].binding;
         wds[num_wds].dstArrayElement = 0;
         wds[num_wds].descriptorCount = 1;
         wds[num_wds].descriptorType = shader->bindings[j].type;
         ++num_wds;
      }
   }

   vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);

   vkCmdBindPipeline(cmdbuf->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
   vkCmdBindDescriptorSets(cmdbuf->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS,
                           gfx_program->layout, 0, 1, &desc_set, 0, NULL);
   zink_bind_vertex_buffers(cmdbuf, ctx);

   if (dinfo->index_size > 0) {
      /* Vulkan has no 8-bit index type */
      assert(dinfo->index_size != 1);
      VkIndexType index_type = dinfo->index_size == 2 ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
      struct zink_resource *res = zink_resource(index_buffer);
      vkCmdBindIndexBuffer(cmdbuf->cmdbuf, res->buffer, index_offset, index_type);
      zink_cmdbuf_reference_resoure(cmdbuf, res);
      vkCmdDrawIndexed(cmdbuf->cmdbuf,
                       dinfo->count, dinfo->instance_count,
                       dinfo->start, dinfo->index_bias, dinfo->start_instance);
   } else
      vkCmdDraw(cmdbuf->cmdbuf, dinfo->count, dinfo->instance_count, dinfo->start, dinfo->start_instance);

   vkCmdEndRenderPass(cmdbuf->cmdbuf);

   zink_end_cmdbuf(ctx, cmdbuf);

   /* release the temporary upload buffer created for user indices */
   if (dinfo->index_size > 0 && dinfo->has_user_indices)
      pipe_resource_reference(&index_buffer, NULL);
}
988
989 static void
990 zink_flush(struct pipe_context *pctx,
991 struct pipe_fence_handle **pfence,
992 enum pipe_flush_flags flags)
993 {
994 struct zink_context *ctx = zink_context(pctx);
995
996 if (pfence)
997 zink_fence_reference(zink_screen(pctx->screen), (struct zink_fence **)pfence,
998 zink_context_curr_cmdbuf(ctx)->fence);
999 }
1000
1001 static void
1002 zink_blit(struct pipe_context *pctx,
1003 const struct pipe_blit_info *info)
1004 {
1005 struct zink_context *ctx = zink_context(pctx);
1006 bool is_resolve = false;
1007 if (info->mask != PIPE_MASK_RGBA ||
1008 info->scissor_enable ||
1009 info->alpha_blend) {
1010 if (!util_blitter_is_blit_supported(ctx->blitter, info)) {
1011 debug_printf("blit unsupported %s -> %s\n",
1012 util_format_short_name(info->src.resource->format),
1013 util_format_short_name(info->dst.resource->format));
1014 return;
1015 }
1016
1017 util_blitter_save_fragment_constant_buffer_slot(ctx->blitter, ctx->ubos[PIPE_SHADER_FRAGMENT]);
1018 util_blitter_save_vertex_buffer_slot(ctx->blitter, ctx->buffers);
1019 util_blitter_save_vertex_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_VERTEX]);
1020 util_blitter_save_fragment_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_FRAGMENT]);
1021 util_blitter_save_rasterizer(ctx->blitter, ctx->gfx_pipeline_state.rast_state);
1022
1023 util_blitter_blit(ctx->blitter, info);
1024 }
1025
1026 struct zink_resource *src = zink_resource(info->src.resource);
1027 struct zink_resource *dst = zink_resource(info->dst.resource);
1028
1029 if (src->base.nr_samples > 1 && dst->base.nr_samples <= 1)
1030 is_resolve = true;
1031
1032 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
1033 if (!cmdbuf)
1034 return;
1035
1036 zink_cmdbuf_reference_resoure(cmdbuf, src);
1037 zink_cmdbuf_reference_resoure(cmdbuf, dst);
1038
1039 if (is_resolve) {
1040 VkImageResolve region = {};
1041
1042 region.srcSubresource.aspectMask = src->aspect;
1043 region.srcSubresource.mipLevel = info->src.level;
1044 region.srcSubresource.baseArrayLayer = 0; // no clue
1045 region.srcSubresource.layerCount = 1; // no clue
1046 region.srcOffset.x = info->src.box.x;
1047 region.srcOffset.y = info->src.box.y;
1048 region.srcOffset.z = info->src.box.z;
1049
1050 region.dstSubresource.aspectMask = dst->aspect;
1051 region.dstSubresource.mipLevel = info->dst.level;
1052 region.dstSubresource.baseArrayLayer = 0; // no clue
1053 region.dstSubresource.layerCount = 1; // no clue
1054 region.dstOffset.x = info->dst.box.x;
1055 region.dstOffset.y = info->dst.box.y;
1056 region.dstOffset.z = info->dst.box.z;
1057
1058 region.extent.width = info->dst.box.width;
1059 region.extent.height = info->dst.box.height;
1060 region.extent.depth = info->dst.box.depth;
1061 vkCmdResolveImage(cmdbuf->cmdbuf, src->image, src->layout,
1062 dst->image, dst->layout,
1063 1, &region);
1064
1065 } else {
1066 if (dst->layout != VK_IMAGE_LAYOUT_GENERAL &&
1067 dst->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
1068 zink_resource_barrier(cmdbuf->cmdbuf, dst, dst->aspect,
1069 VK_IMAGE_LAYOUT_GENERAL);
1070
1071 VkImageBlit region = {};
1072 region.srcSubresource.aspectMask = src->aspect;
1073 region.srcSubresource.mipLevel = info->src.level;
1074 region.srcOffsets[0].x = info->src.box.x;
1075 region.srcOffsets[0].y = info->src.box.y;
1076 region.srcOffsets[1].x = info->src.box.x + info->src.box.width;
1077 region.srcOffsets[1].y = info->src.box.y + info->src.box.height;
1078
1079 if (src->base.array_size > 1) {
1080 region.srcOffsets[0].z = 0;
1081 region.srcOffsets[1].z = 1;
1082 region.srcSubresource.baseArrayLayer = info->src.box.z;
1083 region.srcSubresource.layerCount = info->src.box.depth;
1084 } else {
1085 region.srcOffsets[0].z = info->src.box.z;
1086 region.srcOffsets[1].z = info->src.box.z + info->src.box.depth;
1087 region.srcSubresource.baseArrayLayer = 0;
1088 region.srcSubresource.layerCount = 1;
1089 }
1090
1091 region.dstSubresource.aspectMask = dst->aspect;
1092 region.dstSubresource.mipLevel = info->dst.level;
1093 region.dstOffsets[0].x = info->dst.box.x;
1094 region.dstOffsets[0].y = info->dst.box.y;
1095 region.dstOffsets[1].x = info->dst.box.x + info->dst.box.width;
1096 region.dstOffsets[1].y = info->dst.box.y + info->dst.box.height;
1097
1098 if (dst->base.array_size > 1) {
1099 region.dstOffsets[0].z = 0;
1100 region.dstOffsets[1].z = 1;
1101 region.dstSubresource.baseArrayLayer = info->dst.box.z;
1102 region.dstSubresource.layerCount = info->dst.box.depth;
1103 } else {
1104 region.dstOffsets[0].z = info->dst.box.z;
1105 region.dstOffsets[1].z = info->dst.box.z + info->dst.box.depth;
1106 region.dstSubresource.baseArrayLayer = 0;
1107 region.dstSubresource.layerCount = 1;
1108 }
1109
1110 vkCmdBlitImage(cmdbuf->cmdbuf, src->image, src->layout,
1111 dst->image, dst->layout,
1112 1, &region,
1113 filter(info->filter));
1114 }
1115 zink_end_cmdbuf(ctx, cmdbuf);
1116 }
1117
/* No-op: nothing is kept batched per-resource here, so there is nothing
 * to flush for an individual resource. Required gallium entry point. */
static void
zink_flush_resource(struct pipe_context *pipe,
                    struct pipe_resource *resource)
{
}
1123
1124 static void
1125 zink_resource_copy_region(struct pipe_context *pctx,
1126 struct pipe_resource *pdst,
1127 unsigned dst_level, unsigned dstx, unsigned dsty, unsigned dstz,
1128 struct pipe_resource *psrc,
1129 unsigned src_level, const struct pipe_box *src_box)
1130 {
1131 struct zink_resource *dst = zink_resource(pdst);
1132 struct zink_resource *src = zink_resource(psrc);
1133 struct zink_context *ctx = zink_context(pctx);
1134 if (dst->base.target != PIPE_BUFFER && src->base.target != PIPE_BUFFER) {
1135 VkImageCopy region = {};
1136
1137 region.srcSubresource.aspectMask = src->aspect;
1138 region.srcSubresource.mipLevel = src_level;
1139 region.srcSubresource.layerCount = 1;
1140 if (src->base.array_size > 1) {
1141 region.srcSubresource.baseArrayLayer = src_box->z;
1142 region.srcSubresource.layerCount = src_box->depth;
1143 region.extent.depth = 1;
1144 } else {
1145 region.srcOffset.z = src_box->z;
1146 region.srcSubresource.layerCount = 1;
1147 region.extent.depth = src_box->depth;
1148 }
1149
1150 region.srcOffset.x = src_box->x;
1151 region.srcOffset.y = src_box->y;
1152
1153 region.dstSubresource.aspectMask = dst->aspect;
1154 region.dstSubresource.mipLevel = dst_level;
1155 if (dst->base.array_size > 1) {
1156 region.dstSubresource.baseArrayLayer = dstz;
1157 region.dstSubresource.layerCount = src_box->depth;
1158 } else {
1159 region.dstOffset.z = dstz;
1160 region.dstSubresource.layerCount = 1;
1161 }
1162
1163 region.dstOffset.x = dstx;
1164 region.dstOffset.y = dsty;
1165 region.extent.width = src_box->width;
1166 region.extent.height = src_box->height;
1167
1168 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
1169 if (!cmdbuf)
1170 return;
1171
1172 zink_cmdbuf_reference_resoure(cmdbuf, src);
1173 zink_cmdbuf_reference_resoure(cmdbuf, dst);
1174
1175 vkCmdCopyImage(cmdbuf->cmdbuf, src->image, src->layout,
1176 dst->image, dst->layout,
1177 1, &region);
1178 zink_end_cmdbuf(ctx, cmdbuf);
1179 } else
1180 debug_printf("zink: TODO resource copy\n");
1181 }
1182
1183 struct pipe_context *
1184 zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
1185 {
1186 struct zink_screen *screen = zink_screen(pscreen);
1187 struct zink_context *ctx = CALLOC_STRUCT(zink_context);
1188
1189 ctx->base.screen = pscreen;
1190 ctx->base.priv = priv;
1191
1192 ctx->base.destroy = zink_context_destroy;
1193
1194 zink_context_state_init(&ctx->base);
1195
1196 ctx->base.create_sampler_state = zink_create_sampler_state;
1197 ctx->base.bind_sampler_states = zink_bind_sampler_states;
1198 ctx->base.delete_sampler_state = zink_delete_sampler_state;
1199
1200 ctx->base.create_sampler_view = zink_create_sampler_view;
1201 ctx->base.set_sampler_views = zink_set_sampler_views;
1202 ctx->base.sampler_view_destroy = zink_destroy_sampler_view;
1203
1204 ctx->base.create_vs_state = zink_create_vs_state;
1205 ctx->base.bind_vs_state = zink_bind_vs_state;
1206 ctx->base.delete_vs_state = zink_delete_vs_state;
1207
1208 ctx->base.create_fs_state = zink_create_fs_state;
1209 ctx->base.bind_fs_state = zink_bind_fs_state;
1210 ctx->base.delete_fs_state = zink_delete_fs_state;
1211
1212 ctx->base.set_polygon_stipple = zink_set_polygon_stipple;
1213 ctx->base.set_vertex_buffers = zink_set_vertex_buffers;
1214 ctx->base.set_viewport_states = zink_set_viewport_states;
1215 ctx->base.set_scissor_states = zink_set_scissor_states;
1216 ctx->base.set_constant_buffer = zink_set_constant_buffer;
1217 ctx->base.set_framebuffer_state = zink_set_framebuffer_state;
1218 ctx->base.set_stencil_ref = zink_set_stencil_ref;
1219 ctx->base.set_clip_state = zink_set_clip_state;
1220 ctx->base.set_active_query_state = zink_set_active_query_state;
1221 ctx->base.set_blend_color = zink_set_blend_color;
1222
1223 ctx->base.clear = zink_clear;
1224 ctx->base.draw_vbo = zink_draw_vbo;
1225 ctx->base.flush = zink_flush;
1226
1227 ctx->base.resource_copy_region = zink_resource_copy_region;
1228 ctx->base.blit = zink_blit;
1229
1230 ctx->base.flush_resource = zink_flush_resource;
1231 zink_context_surface_init(&ctx->base);
1232 zink_context_resource_init(&ctx->base);
1233 zink_context_query_init(&ctx->base);
1234
1235 slab_create_child(&ctx->transfer_pool, &screen->transfer_pool);
1236
1237 ctx->base.stream_uploader = u_upload_create_default(&ctx->base);
1238 ctx->base.const_uploader = ctx->base.stream_uploader;
1239
1240 int prim_hwsupport = 1 << PIPE_PRIM_POINTS |
1241 1 << PIPE_PRIM_LINES |
1242 1 << PIPE_PRIM_LINE_STRIP |
1243 1 << PIPE_PRIM_TRIANGLES |
1244 1 << PIPE_PRIM_TRIANGLE_STRIP |
1245 1 << PIPE_PRIM_TRIANGLE_FAN;
1246
1247 ctx->primconvert = util_primconvert_create(&ctx->base, prim_hwsupport);
1248 if (!ctx->primconvert)
1249 goto fail;
1250
1251 ctx->blitter = util_blitter_create(&ctx->base);
1252 if (!ctx->blitter)
1253 goto fail;
1254
1255 VkCommandPoolCreateInfo cpci = {};
1256 cpci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
1257 cpci.queueFamilyIndex = screen->gfx_queue;
1258 cpci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
1259 if (vkCreateCommandPool(screen->dev, &cpci, NULL, &ctx->cmdpool) != VK_SUCCESS)
1260 goto fail;
1261
1262 VkCommandBufferAllocateInfo cbai = {};
1263 cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1264 cbai.commandPool = ctx->cmdpool;
1265 cbai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1266 cbai.commandBufferCount = 1;
1267 for (int i = 0; i < ARRAY_SIZE(ctx->cmdbufs); ++i) {
1268 if (vkAllocateCommandBuffers(screen->dev, &cbai, &ctx->cmdbufs[i].cmdbuf) != VK_SUCCESS)
1269 goto fail;
1270
1271 ctx->cmdbufs[i].resources = _mesa_set_create(NULL, _mesa_hash_pointer,
1272 _mesa_key_pointer_equal);
1273 if (!ctx->cmdbufs[i].resources)
1274 goto fail;
1275
1276 util_dynarray_init(&ctx->cmdbufs[i].zombie_samplers, NULL);
1277 }
1278
1279 VkDescriptorPoolSize sizes[] = {
1280 {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1000}
1281 };
1282 VkDescriptorPoolCreateInfo dpci = {};
1283 dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
1284 dpci.pPoolSizes = sizes;
1285 dpci.poolSizeCount = ARRAY_SIZE(sizes);
1286 dpci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
1287 dpci.maxSets = 1000;
1288
1289 if(vkCreateDescriptorPool(screen->dev, &dpci, 0, &ctx->descpool) != VK_SUCCESS)
1290 goto fail;
1291
1292 vkGetDeviceQueue(screen->dev, screen->gfx_queue, 0, &ctx->queue);
1293
1294 ctx->program_cache = _mesa_hash_table_create(NULL, hash_gfx_program, equals_gfx_program);
1295 if (!ctx->program_cache)
1296 goto fail;
1297
1298 ctx->dirty = ZINK_DIRTY_PROGRAM;
1299
1300 return &ctx->base;
1301
1302 fail:
1303 if (ctx) {
1304 vkDestroyCommandPool(screen->dev, ctx->cmdpool, NULL);
1305 FREE(ctx);
1306 }
1307 return NULL;
1308 }