zink: move render-pass begin to helper
[mesa.git] / src / gallium / drivers / zink / zink_context.c
1 /*
2 * Copyright 2018 Collabora Ltd.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "zink_context.h"
25
26 #include "zink_cmdbuf.h"
27 #include "zink_compiler.h"
28 #include "zink_framebuffer.h"
29 #include "zink_pipeline.h"
30 #include "zink_program.h"
31 #include "zink_render_pass.h"
32 #include "zink_resource.h"
33 #include "zink_screen.h"
34 #include "zink_state.h"
35 #include "zink_surface.h"
36
37 #include "indices/u_primconvert.h"
38 #include "util/u_blitter.h"
39 #include "util/u_debug.h"
40 #include "util/u_format.h"
41 #include "util/u_framebuffer.h"
42 #include "util/u_helpers.h"
43 #include "util/u_inlines.h"
44
45 #include "nir.h"
46
47 #include "util/u_memory.h"
48 #include "util/u_prim.h"
49 #include "util/u_upload_mgr.h"
50
/* pipe_context::destroy
 *
 * Tears down per-context state. The command buffer is freed before its
 * owning pool is destroyed; the remaining helpers (primconvert, uploader,
 * transfer slab, blitter) are released in the reverse of their creation
 * order before the context itself is freed.
 */
static void
zink_context_destroy(struct pipe_context *pctx)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   vkFreeCommandBuffers(screen->dev, ctx->cmdpool, 1, &ctx->cmdbuf.cmdbuf);
   vkDestroyCommandPool(screen->dev, ctx->cmdpool, NULL);

   util_primconvert_destroy(ctx->primconvert);
   u_upload_destroy(pctx->stream_uploader);
   slab_destroy_child(&ctx->transfer_pool);
   util_blitter_destroy(ctx->blitter);
   FREE(ctx);
}
65
66 static VkFilter
67 filter(enum pipe_tex_filter filter)
68 {
69 switch (filter) {
70 case PIPE_TEX_FILTER_NEAREST: return VK_FILTER_NEAREST;
71 case PIPE_TEX_FILTER_LINEAR: return VK_FILTER_LINEAR;
72 }
73 unreachable("unexpected filter");
74 }
75
76 static VkSamplerMipmapMode
77 sampler_mipmap_mode(enum pipe_tex_mipfilter filter)
78 {
79 switch (filter) {
80 case PIPE_TEX_MIPFILTER_NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
81 case PIPE_TEX_MIPFILTER_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
82 case PIPE_TEX_MIPFILTER_NONE:
83 unreachable("PIPE_TEX_MIPFILTER_NONE should be dealt with earlier");
84 }
85 unreachable("unexpected filter");
86 }
87
88 static VkSamplerAddressMode
89 sampler_address_mode(enum pipe_tex_wrap filter)
90 {
91 switch (filter) {
92 case PIPE_TEX_WRAP_REPEAT: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
93 case PIPE_TEX_WRAP_CLAMP: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
94 case PIPE_TEX_WRAP_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
95 case PIPE_TEX_WRAP_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
96 case PIPE_TEX_WRAP_MIRROR_REPEAT: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
97 case PIPE_TEX_WRAP_MIRROR_CLAMP: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
98 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
99 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
100 }
101 unreachable("unexpected wrap");
102 }
103
/* pipe_context::create_sampler_state
 *
 * Translates a gallium sampler CSO into a VkSampler. The raw VkSampler
 * handle itself is used as the CSO, so no wrapper struct is allocated.
 * NOTE(review): VkSampler is a non-dispatchable handle; returning it as a
 * void * only round-trips cleanly where handles are pointer-sized —
 * confirm behavior on 32-bit builds where it may be a 64-bit integer.
 * Returns NULL on vkCreateSampler failure.
 */
static void *
zink_create_sampler_state(struct pipe_context *pctx,
                          const struct pipe_sampler_state *state)
{
   struct zink_screen *screen = zink_screen(pctx->screen);

   VkSamplerCreateInfo sci = {};
   sci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
   sci.magFilter = filter(state->mag_img_filter);
   sci.minFilter = filter(state->min_img_filter);

   if (state->min_mip_filter != PIPE_TEX_MIPFILTER_NONE) {
      sci.mipmapMode = sampler_mipmap_mode(state->min_mip_filter);
      sci.minLod = state->min_lod;
      sci.maxLod = state->max_lod;
   } else {
      /* no mipmapping: pin sampling to the base level by clamping LOD to 0 */
      sci.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
      sci.minLod = 0;
      sci.maxLod = 0;
   }

   sci.addressModeU = sampler_address_mode(state->wrap_s);
   sci.addressModeV = sampler_address_mode(state->wrap_t);
   sci.addressModeW = sampler_address_mode(state->wrap_r);
   sci.mipLodBias = state->lod_bias;
   sci.compareOp = VK_COMPARE_OP_NEVER; // TODO
   sci.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // TODO

   /* anisotropyEnable is left VK_FALSE (zero-init) unless requested */
   if (state->max_anisotropy > 1) {
      sci.maxAnisotropy = state->max_anisotropy;
      sci.anisotropyEnable = VK_TRUE;
   }

   VkSampler sampler;
   VkResult err = vkCreateSampler(screen->dev, &sci, NULL, &sampler);
   if (err != VK_SUCCESS)
      return NULL;

   return sampler;
}
144
145 static void
146 zink_bind_sampler_states(struct pipe_context *pctx,
147 enum pipe_shader_type shader,
148 unsigned start_slot,
149 unsigned num_samplers,
150 void **samplers)
151 {
152 struct zink_context *ctx = zink_context(pctx);
153 for (unsigned i = 0; i < num_samplers; ++i)
154 ctx->samplers[shader][start_slot + i] = (VkSampler)samplers[i];
155 }
156
157 static void
158 zink_delete_sampler_state(struct pipe_context *pctx,
159 void *sampler_state)
160 {
161 struct zink_screen *screen = zink_screen(pctx->screen);
162 vkDestroySampler(screen->dev, sampler_state, NULL);
163 }
164
165
166 static VkImageViewType
167 image_view_type(enum pipe_texture_target target)
168 {
169 switch (target) {
170 case PIPE_TEXTURE_1D: return VK_IMAGE_VIEW_TYPE_1D;
171 case PIPE_TEXTURE_1D_ARRAY: return VK_IMAGE_VIEW_TYPE_1D_ARRAY;
172 case PIPE_TEXTURE_2D: return VK_IMAGE_VIEW_TYPE_2D;
173 case PIPE_TEXTURE_2D_ARRAY: return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
174 case PIPE_TEXTURE_CUBE: return VK_IMAGE_VIEW_TYPE_CUBE;
175 case PIPE_TEXTURE_CUBE_ARRAY: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
176 case PIPE_TEXTURE_3D: return VK_IMAGE_VIEW_TYPE_3D;
177 case PIPE_TEXTURE_RECT: return VK_IMAGE_VIEW_TYPE_2D; /* not sure */
178 default:
179 unreachable("unexpected target");
180 }
181 }
182
183 static VkComponentSwizzle
184 component_mapping(enum pipe_swizzle swizzle)
185 {
186 switch (swizzle) {
187 case PIPE_SWIZZLE_X: return VK_COMPONENT_SWIZZLE_R;
188 case PIPE_SWIZZLE_Y: return VK_COMPONENT_SWIZZLE_G;
189 case PIPE_SWIZZLE_Z: return VK_COMPONENT_SWIZZLE_B;
190 case PIPE_SWIZZLE_W: return VK_COMPONENT_SWIZZLE_A;
191 case PIPE_SWIZZLE_0: return VK_COMPONENT_SWIZZLE_ZERO;
192 case PIPE_SWIZZLE_1: return VK_COMPONENT_SWIZZLE_ONE;
193 case PIPE_SWIZZLE_NONE: return VK_COMPONENT_SWIZZLE_IDENTITY; // ???
194 default:
195 unreachable("unexpected swizzle");
196 }
197 }
198
199 static struct pipe_sampler_view *
200 zink_create_sampler_view(struct pipe_context *pctx, struct pipe_resource *pres,
201 const struct pipe_sampler_view *state)
202 {
203 struct zink_screen *screen = zink_screen(pctx->screen);
204 struct zink_resource *res = zink_resource(pres);
205 struct zink_sampler_view *sampler_view = CALLOC_STRUCT(zink_sampler_view);
206
207 sampler_view->base = *state;
208 sampler_view->base.texture = NULL;
209 pipe_resource_reference(&sampler_view->base.texture, pres);
210 sampler_view->base.reference.count = 1;
211 sampler_view->base.context = pctx;
212
213 VkImageViewCreateInfo ivci = {};
214 ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
215 ivci.image = res->image;
216 ivci.viewType = image_view_type(state->target);
217 ivci.format = zink_get_format(state->format);
218 ivci.components.r = component_mapping(state->swizzle_r);
219 ivci.components.g = component_mapping(state->swizzle_g);
220 ivci.components.b = component_mapping(state->swizzle_b);
221 ivci.components.a = component_mapping(state->swizzle_a);
222 ivci.subresourceRange.aspectMask = zink_aspect_from_format(state->format);
223 ivci.subresourceRange.baseMipLevel = state->u.tex.first_level;
224 ivci.subresourceRange.baseArrayLayer = state->u.tex.first_layer;
225 ivci.subresourceRange.levelCount = state->u.tex.last_level - state->u.tex.first_level + 1;
226 ivci.subresourceRange.layerCount = state->u.tex.last_layer - state->u.tex.first_layer + 1;
227
228 VkResult err = vkCreateImageView(screen->dev, &ivci, NULL, &sampler_view->image_view);
229 if (err != VK_SUCCESS) {
230 FREE(sampler_view);
231 return NULL;
232 }
233
234 return &sampler_view->base;
235 }
236
237 static void
238 zink_destroy_sampler_view(struct pipe_context *pctx,
239 struct pipe_sampler_view *view)
240 {
241 FREE(view);
242 }
243
244 static void *
245 zink_create_vs_state(struct pipe_context *pctx,
246 const struct pipe_shader_state *shader)
247 {
248 struct nir_shader *nir;
249 if (shader->type != PIPE_SHADER_IR_NIR)
250 nir = zink_tgsi_to_nir(pctx->screen, shader->tokens);
251 else
252 nir = (struct nir_shader *)shader->ir.nir;
253
254 return zink_compile_nir(zink_screen(pctx->screen), nir);
255 }
256
257 static void
258 zink_bind_vs_state(struct pipe_context *pctx,
259 void *cso)
260 {
261 struct zink_context *ctx = zink_context(pctx);
262 ctx->gfx_stages[PIPE_SHADER_VERTEX] = cso;
263 }
264
265 static void
266 zink_delete_vs_state(struct pipe_context *pctx,
267 void *cso)
268 {
269 zink_shader_free(zink_screen(pctx->screen), cso);
270 }
271
272 static void *
273 zink_create_fs_state(struct pipe_context *pctx,
274 const struct pipe_shader_state *shader)
275 {
276 struct nir_shader *nir;
277 if (shader->type != PIPE_SHADER_IR_NIR)
278 nir = zink_tgsi_to_nir(pctx->screen, shader->tokens);
279 else
280 nir = (struct nir_shader *)shader->ir.nir;
281
282 return zink_compile_nir(zink_screen(pctx->screen), nir);
283 }
284
285 static void
286 zink_bind_fs_state(struct pipe_context *pctx,
287 void *cso)
288 {
289 struct zink_context *ctx = zink_context(pctx);
290 ctx->gfx_stages[PIPE_SHADER_FRAGMENT] = cso;
291 }
292
293 static void
294 zink_delete_fs_state(struct pipe_context *pctx,
295 void *cso)
296 {
297 zink_shader_free(zink_screen(pctx->screen), cso);
298 }
299
/* pipe_context::set_polygon_stipple
 *
 * Intentionally a no-op: Vulkan has no polygon-stipple state.
 * NOTE(review): presumably stipple is meant to be lowered in the shader
 * eventually — confirm against the driver's feature plan.
 */
static void
zink_set_polygon_stipple(struct pipe_context *pctx,
                         const struct pipe_poly_stipple *ps)
{
}
305
306 static void
307 zink_set_vertex_buffers(struct pipe_context *pctx,
308 unsigned start_slot,
309 unsigned num_buffers,
310 const struct pipe_vertex_buffer *buffers)
311 {
312 struct zink_context *ctx = zink_context(pctx);
313
314 if (buffers) {
315 for (int i = 0; i < num_buffers; ++i) {
316 const struct pipe_vertex_buffer *vb = buffers + i;
317 ctx->gfx_pipeline_state.bindings[start_slot + i].stride = vb->stride;
318 }
319 }
320
321 util_set_vertex_buffers_mask(ctx->buffers, &ctx->buffers_enabled_mask,
322 buffers, start_slot, num_buffers);
323 }
324
/* pipe_context::set_viewport_states
 *
 * Converts gallium's translate/scale viewport representation back into the
 * x/y/width/height (+depth range) form Vulkan expects:
 *   x = translate - scale, width = 2 * scale (same for y/height),
 *   minDepth = translate[2] - scale[2], maxDepth = translate[2] + scale[2].
 * NOTE(review): the depth mapping assumes a particular clip-space depth
 * convention (looks like [-1,1] mapped symmetrically) — confirm against the
 * PIPE_CAP_CLIP_HALFZ setting this driver reports.
 */
static void
zink_set_viewport_states(struct pipe_context *pctx,
                         unsigned start_slot,
                         unsigned num_viewports,
                         const struct pipe_viewport_state *state)
{
   struct zink_context *ctx = zink_context(pctx);

   for (unsigned i = 0; i < num_viewports; ++i) {
      VkViewport viewport = {
         state[i].translate[0] - state[i].scale[0],
         state[i].translate[1] - state[i].scale[1],
         state[i].scale[0] * 2,
         state[i].scale[1] * 2,
         state[i].translate[2] - state[i].scale[2],
         state[i].translate[2] + state[i].scale[2]
      };
      ctx->viewports[start_slot + i] = viewport;
   }
   /* tracks the highest slot ever written, not the count actually set */
   ctx->num_viewports = start_slot + num_viewports;
}
346
347 static void
348 zink_set_scissor_states(struct pipe_context *pctx,
349 unsigned start_slot, unsigned num_scissors,
350 const struct pipe_scissor_state *states)
351 {
352 struct zink_context *ctx = zink_context(pctx);
353
354 for (unsigned i = 0; i < num_scissors; i++) {
355 VkRect2D scissor;
356
357 scissor.offset.x = states[i].minx;
358 scissor.offset.y = states[i].miny;
359 scissor.extent.width = states[i].maxx - states[i].minx;
360 scissor.extent.height = states[i].maxy - states[i].miny;
361 ctx->scissors[start_slot + i] = scissor;
362 }
363 ctx->num_scissors = start_slot + num_scissors;
364 }
365
/* pipe_context::set_constant_buffer
 *
 * Binds (or unbinds, when cb == NULL) a uniform buffer for one shader
 * stage/slot. User-memory constant buffers are first copied into GPU
 * memory via the context's const_uploader; the upload returns a fresh
 * resource reference which is transferred into the slot and then the
 * local reference dropped.
 */
static void
zink_set_constant_buffer(struct pipe_context *pctx,
                         enum pipe_shader_type shader, uint index,
                         const struct pipe_constant_buffer *cb)
{
   struct zink_context *ctx = zink_context(pctx);

   if (cb) {
      struct pipe_resource *buffer = cb->buffer;
      unsigned offset = cb->buffer_offset;
      /* user pointer: stage the data through the upload manager; `buffer`
       * and `offset` then point at the uploaded copy */
      if (cb->user_buffer)
         u_upload_data(ctx->base.const_uploader, 0, cb->buffer_size, 64,
                       cb->user_buffer, &offset, &buffer);

      pipe_resource_reference(&ctx->ubos[shader][index].buffer, buffer);
      ctx->ubos[shader][index].buffer_offset = offset;
      ctx->ubos[shader][index].buffer_size = cb->buffer_size;
      ctx->ubos[shader][index].user_buffer = NULL;

      /* release the extra reference u_upload_data handed back */
      if (cb->user_buffer)
         pipe_resource_reference(&buffer, NULL);
   } else {
      /* unbind: clear the slot and drop its reference */
      pipe_resource_reference(&ctx->ubos[shader][index].buffer, NULL);
      ctx->ubos[shader][index].buffer_offset = 0;
      ctx->ubos[shader][index].buffer_size = 0;
      ctx->ubos[shader][index].user_buffer = NULL;
   }
}
394
395 static void
396 zink_set_sampler_views(struct pipe_context *pctx,
397 enum pipe_shader_type shader_type,
398 unsigned start_slot,
399 unsigned num_views,
400 struct pipe_sampler_view **views)
401 {
402 struct zink_context *ctx = zink_context(pctx);
403 assert(views);
404 for (unsigned i = 0; i < num_views; ++i) {
405 pipe_sampler_view_reference(
406 &ctx->image_views[shader_type][start_slot + i],
407 views[i]);
408 }
409 }
410
411 static void
412 zink_set_stencil_ref(struct pipe_context *pctx,
413 const struct pipe_stencil_ref *ref)
414 {
415 struct zink_context *ctx = zink_context(pctx);
416 ctx->stencil_ref[0] = ref->ref_value[0];
417 ctx->stencil_ref[1] = ref->ref_value[1];
418 }
419
/* pipe_context::set_clip_state
 *
 * Intentionally a no-op: user clip planes are not supported here.
 * NOTE(review): presumably they would need shader-side lowering — confirm.
 */
static void
zink_set_clip_state(struct pipe_context *pctx,
                    const struct pipe_clip_state *pcs)
{
}
425
/* Build a zink_render_pass_state from the framebuffer's attachment formats
 * and create a matching render pass. The depth/stencil attachment, if any,
 * occupies slot nr_cbufs in the rts array.
 *
 * Returns a new reference owned by the caller. A new render pass is
 * created on every call — see the TODO; a cache keyed on the state struct
 * is the intended end state.
 */
static struct zink_render_pass *
get_render_pass(struct zink_context *ctx,
                const struct pipe_framebuffer_state *fb)
{
   struct zink_render_pass_state state;

   for (int i = 0; i < fb->nr_cbufs; i++) {
      struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);
      state.rts[i].format = cbuf->format;
   }
   state.num_cbufs = fb->nr_cbufs;

   if (fb->zsbuf) {
      struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
      state.rts[fb->nr_cbufs].format = zsbuf->format;
   }
   state.have_zsbuf = fb->zsbuf != NULL;

   // TODO: cache instead!
   return zink_create_render_pass(zink_screen(ctx->base.screen), &state);
}
447
448 static struct zink_framebuffer *
449 get_framebuffer(struct zink_context *ctx,
450 const struct pipe_framebuffer_state *fb,
451 struct zink_render_pass *rp)
452 {
453 // TODO: cache!
454 return zink_create_framebuffer(zink_screen(ctx->base.screen), fb, rp);
455 }
456
/* pipe_context::set_framebuffer_state
 *
 * Rebuilds the render pass and framebuffer for the new attachment set,
 * stores them on the context (reference-swapped, so the old ones are
 * released), then transitions each attachment into a layout usable as a
 * render target. The local `rp`/`fb` references from the getters are
 * dropped once the context holds its own.
 */
static void
zink_set_framebuffer_state(struct pipe_context *pctx,
                           const struct pipe_framebuffer_state *state)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);

   struct zink_render_pass *rp = get_render_pass(ctx, state);
   zink_render_pass_reference(screen, &ctx->render_pass, rp);

   struct zink_framebuffer *fb = get_framebuffer(ctx, state, rp);
   zink_framebuffer_reference(screen, &ctx->framebuffer, fb);
   zink_framebuffer_reference(screen, &fb, NULL);
   zink_render_pass_reference(screen, &rp, NULL);

   ctx->gfx_pipeline_state.num_attachments = state->nr_cbufs;

   util_copy_framebuffer_state(&ctx->fb_state, state);

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   /* color attachments: anything not already GENERAL or
    * COLOR_ATTACHMENT_OPTIMAL gets moved to GENERAL */
   for (int i = 0; i < state->nr_cbufs; i++) {
      struct zink_resource *res = zink_resource(state->cbufs[i]->texture);
      if (res->layout != VK_IMAGE_LAYOUT_GENERAL &&
          res->layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   /* same for the depth/stencil attachment, if present */
   if (state->zsbuf) {
      struct zink_resource *res = zink_resource(state->zsbuf->texture);
      if (res->layout != VK_IMAGE_LAYOUT_GENERAL &&
          res->layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   zink_end_cmdbuf(ctx, cmdbuf);
}
498
/* pipe_context::set_active_query_state — no-op; queries are not
 * pause/resume-aware here yet. */
static void
zink_set_active_query_state(struct pipe_context *pctx, bool enable)
{
}
503
504 static void
505 zink_set_blend_color(struct pipe_context *pctx,
506 const struct pipe_blend_color *color)
507 {
508 struct zink_context *ctx = zink_context(pctx);
509 memcpy(ctx->blend_constants, color->color, sizeof(float) * 4);
510 }
511
/* Pick the access mask to pair with an image layout in a memory barrier.
 * NOTE(review): GENERAL maps to 0 (no access) — conservative for a
 * just-transitioned image but arguably too weak if the image is actually
 * read/written while in GENERAL; confirm against how barriers are issued.
 */
static VkAccessFlags
access_flags(VkImageLayout layout)
{
   switch (layout) {
   case VK_IMAGE_LAYOUT_UNDEFINED:
   case VK_IMAGE_LAYOUT_GENERAL:
      return 0;

   case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
      return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
   case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
      return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;

   case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
      return VK_ACCESS_SHADER_READ_BIT;

   case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
      return VK_ACCESS_TRANSFER_READ_BIT;

   case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
      return VK_ACCESS_TRANSFER_WRITE_BIT;

   case VK_IMAGE_LAYOUT_PREINITIALIZED:
      return VK_ACCESS_HOST_WRITE_BIT;

   default:
      unreachable("unexpected layout");
   }
}
541
/* Transition all mip levels and array layers of `res` (for the given
 * aspects) to `new_layout`, and record the new layout on the resource.
 *
 * NOTE(review): the barrier uses TOP_OF_PIPE as the source stage and
 * BOTTOM_OF_PIPE as the destination. That performs the layout transition
 * but does not make prior writes visible to subsequent stages — looks
 * like it relies on coarse command-buffer submission ordering; confirm
 * whether stronger stage masks are needed.
 */
void
zink_resource_barrier(VkCommandBuffer cmdbuf, struct zink_resource *res,
                      VkImageAspectFlags aspect, VkImageLayout new_layout)
{
   /* whole image: every level and layer of the requested aspects */
   VkImageSubresourceRange isr = {
      aspect,
      0, VK_REMAINING_MIP_LEVELS,
      0, VK_REMAINING_ARRAY_LAYERS
   };

   VkImageMemoryBarrier imb = {
      VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
      NULL,
      access_flags(res->layout),
      access_flags(new_layout),
      res->layout,
      new_layout,
      VK_QUEUE_FAMILY_IGNORED,
      VK_QUEUE_FAMILY_IGNORED,
      res->image,
      isr
   };
   vkCmdPipelineBarrier(
      cmdbuf,
      VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
      VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
      0,
      0, NULL,
      0, NULL,
      1, &imb
   );

   /* track the current layout CPU-side for the next barrier's oldLayout */
   res->layout = new_layout;
}
576
/* pipe_context::clear
 *
 * Clears the currently-bound framebuffer attachments outside of a render
 * pass using vkCmdClearColorImage / vkCmdClearDepthStencilImage. The flow
 * is: (1) transition every targeted image into a clear-compatible layout,
 * (2) record the clears over the images' full subresource ranges.
 *
 * NOTE(review): the clear commands always pass VK_IMAGE_LAYOUT_GENERAL,
 * but images already in TRANSFER_DST_OPTIMAL skip the barrier and keep
 * that layout — the layout argument would then mismatch; confirm.
 * NOTE(review): clears cover all levels/layers of the image, ignoring any
 * scissor — presumably acceptable for now.
 */
static void
zink_clear(struct pipe_context *pctx,
           unsigned buffers,
           const union pipe_color_union *pcolor,
           double depth, unsigned stencil)
{
   struct zink_context *ctx = zink_context(pctx);
   struct pipe_framebuffer_state *fb = &ctx->fb_state;

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   // first transition all images to a compatible layout
   if (buffers & PIPE_CLEAR_COLOR) {
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
            continue;

         struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);

         if (cbuf->layout != VK_IMAGE_LAYOUT_GENERAL &&
             cbuf->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
            zink_resource_barrier(cmdbuf->cmdbuf, cbuf, cbuf->aspect,
                                  VK_IMAGE_LAYOUT_GENERAL);
      }
   }

   /* collect the aspects to clear; non-zero also flags "do a zs clear" */
   VkImageAspectFlags depthStencilAspect = 0;
   if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
      struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
      if (buffers & PIPE_CLEAR_DEPTH)
         depthStencilAspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
      if (buffers & PIPE_CLEAR_STENCIL)
         depthStencilAspect |= VK_IMAGE_ASPECT_STENCIL_BIT;

      if (zsbuf->layout != VK_IMAGE_LAYOUT_GENERAL &&
          zsbuf->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, zsbuf, depthStencilAspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   /* clear color is taken as floats; integer formats not handled here */
   VkClearColorValue color;
   color.float32[0] = pcolor->f[0];
   color.float32[1] = pcolor->f[1];
   color.float32[2] = pcolor->f[2];
   color.float32[3] = pcolor->f[3];

   if (buffers & PIPE_CLEAR_COLOR) {
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
            continue;

         struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);

         VkImageSubresourceRange range;
         range.aspectMask = cbuf->aspect;
         range.baseMipLevel = 0;
         range.levelCount = VK_REMAINING_MIP_LEVELS;
         range.baseArrayLayer = 0;
         range.layerCount = VK_REMAINING_ARRAY_LAYERS;
         vkCmdClearColorImage(cmdbuf->cmdbuf,
                              cbuf->image, VK_IMAGE_LAYOUT_GENERAL,
                              &color,
                              1, &range);
      }
   }

   if (depthStencilAspect) {
      struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);

      VkClearDepthStencilValue zsvalue = { depth, stencil };

      VkImageSubresourceRange range;
      range.aspectMask = depthStencilAspect;
      range.baseMipLevel = 0;
      range.levelCount = VK_REMAINING_MIP_LEVELS;
      range.baseArrayLayer = 0;
      range.layerCount = VK_REMAINING_ARRAY_LAYERS;

      vkCmdClearDepthStencilImage(cmdbuf->cmdbuf,
                                  zsbuf->image, VK_IMAGE_LAYOUT_GENERAL,
                                  &zsvalue,
                                  1, &range);
   }

   zink_end_cmdbuf(ctx, cmdbuf);
}
665
666 VkShaderStageFlagBits
667 zink_shader_stage(enum pipe_shader_type type)
668 {
669 VkShaderStageFlagBits stages[] = {
670 [PIPE_SHADER_VERTEX] = VK_SHADER_STAGE_VERTEX_BIT,
671 [PIPE_SHADER_FRAGMENT] = VK_SHADER_STAGE_FRAGMENT_BIT,
672 [PIPE_SHADER_GEOMETRY] = VK_SHADER_STAGE_GEOMETRY_BIT,
673 [PIPE_SHADER_TESS_CTRL] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
674 [PIPE_SHADER_TESS_EVAL] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
675 [PIPE_SHADER_COMPUTE] = VK_SHADER_STAGE_COMPUTE_BIT,
676 };
677 return stages[type];
678 }
679
680 static VkDescriptorSet
681 allocate_descriptor_set(struct zink_context *ctx, VkDescriptorSetLayout dsl)
682 {
683 struct zink_screen *screen = zink_screen(ctx->base.screen);
684 VkDescriptorSetAllocateInfo dsai;
685 memset((void *)&dsai, 0, sizeof(dsai));
686 dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
687 dsai.pNext = NULL;
688 dsai.descriptorPool = ctx->descpool;
689 dsai.descriptorSetCount = 1;
690 dsai.pSetLayouts = &dsl;
691
692 VkDescriptorSet desc_set;
693 if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
694 if (vkResetDescriptorPool(screen->dev, ctx->descpool, 0) != VK_SUCCESS) {
695 fprintf(stderr, "vkResetDescriptorPool failed\n");
696 return VK_NULL_HANDLE;
697 }
698 if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
699 fprintf(stderr, "vkAllocateDescriptorSets failed\n");
700 return VK_NULL_HANDLE;
701 }
702 }
703
704 return desc_set;
705 }
706
707 static VkPrimitiveTopology
708 zink_primitive_topology(enum pipe_prim_type mode)
709 {
710 switch (mode) {
711 case PIPE_PRIM_POINTS:
712 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
713
714 case PIPE_PRIM_LINES:
715 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
716
717 case PIPE_PRIM_LINE_STRIP:
718 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
719
720 case PIPE_PRIM_TRIANGLES:
721 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
722
723 case PIPE_PRIM_TRIANGLE_STRIP:
724 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
725
726 case PIPE_PRIM_TRIANGLE_FAN:
727 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
728
729 default:
730 unreachable("unexpected enum pipe_prim_type");
731 }
732 }
733
/* Bind the vertex buffers referenced by the current vertex-elements state.
 * The element state's binding_map translates Vulkan binding index i back
 * to the gallium vertex-buffer slot it pulls from; each bound buffer must
 * have a real resource attached (user buffers are not handled here).
 */
static void
zink_bind_vertex_buffers(VkCommandBuffer cmdbuf, struct zink_context *ctx)
{
   VkBuffer buffers[PIPE_MAX_ATTRIBS];
   VkDeviceSize buffer_offsets[PIPE_MAX_ATTRIBS];
   struct zink_vertex_elements_state *elems = ctx->gfx_pipeline_state.element_state;
   for (unsigned i = 0; i < elems->num_bindings; i++) {
      struct pipe_vertex_buffer *vb = ctx->buffers + elems->binding_map[i];
      assert(vb && vb->buffer.resource);
      struct zink_resource *res = zink_resource(vb->buffer.resource);
      buffers[i] = res->buffer;
      buffer_offsets[i] = vb->buffer_offset;
   }

   if (elems->num_bindings > 0)
      vkCmdBindVertexBuffers(cmdbuf, 0, elems->num_bindings, buffers, buffer_offsets);
}
751
752 static void
753 begin_render_pass(struct zink_cmdbuf *cmdbuf, struct zink_render_pass *rp,
754 struct zink_framebuffer *fb, unsigned width, unsigned height)
755 {
756 VkRenderPassBeginInfo rpbi = {};
757 rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
758 rpbi.renderPass = rp->render_pass;
759 rpbi.renderArea.offset.x = 0;
760 rpbi.renderArea.offset.y = 0;
761 rpbi.renderArea.extent.width = width;
762 rpbi.renderArea.extent.height = height;
763 rpbi.clearValueCount = 0;
764 rpbi.pClearValues = NULL;
765 rpbi.framebuffer = fb->fb;
766
767 vkCmdBeginRenderPass(cmdbuf->cmdbuf, &rpbi, VK_SUBPASS_CONTENTS_INLINE);
768 }
769
/* pipe_context::draw_vbo
 *
 * Records one draw: unsupported primitive types are routed through
 * primconvert; otherwise a gfx program and pipeline are built for the
 * current state, dynamic state (viewport/scissor/stencil-ref/depth-bias/
 * blend constants) is set, a descriptor set is filled from the bound UBOs
 * and sampler views, and the draw is emitted inside a render pass.
 *
 * NOTE(review): `gfx_program` is created per draw and never destroyed —
 * looks like a leak; the pipeline is also rebuilt and destroyed per draw.
 * NOTE(review): the return value of allocate_descriptor_set is not
 * checked against VK_NULL_HANDLE before use.
 */
static void
zink_draw_vbo(struct pipe_context *pctx,
              const struct pipe_draw_info *dinfo)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_rasterizer_state *rast_state = ctx->gfx_pipeline_state.rast_state;

   /* primitives Vulkan can't draw directly go through primconvert;
    * the cast-away-const lets u_trim_pipe_prim clamp dinfo->count */
   if (dinfo->mode >= PIPE_PRIM_QUADS ||
       dinfo->mode == PIPE_PRIM_LINE_LOOP) {
      if (!u_trim_pipe_prim(dinfo->mode, (unsigned *)&dinfo->count))
         return;

      util_primconvert_save_rasterizer_state(ctx->primconvert, &rast_state->base);
      util_primconvert_draw_vbo(ctx->primconvert, dinfo);
      return;
   }

   struct zink_gfx_program *gfx_program = zink_create_gfx_program(screen->dev,
                                                                  ctx->gfx_stages);
   if (!gfx_program)
      return;

   ctx->gfx_pipeline_state.primitive_topology = zink_primitive_topology(dinfo->mode);

   VkPipeline pipeline = zink_create_gfx_pipeline(screen->dev,
                                                  gfx_program,
                                                  &ctx->gfx_pipeline_state,
                                                  ctx->render_pass->render_pass);

   /* depth bias is enabled per reduced primitive type, mirroring the
    * GL polygon-offset point/line/fill split */
   bool depth_bias = false;
   switch (u_reduced_prim(dinfo->mode)) {
   case PIPE_PRIM_POINTS:
      depth_bias = rast_state->offset_point;
      break;

   case PIPE_PRIM_LINES:
      depth_bias = rast_state->offset_line;
      break;

   case PIPE_PRIM_TRIANGLES:
      depth_bias = rast_state->offset_tri;
      break;

   default:
      unreachable("unexpected reduced prim");
   }

   /* user index buffers are uploaded to a real resource first */
   unsigned index_offset = 0;
   struct pipe_resource *index_buffer = NULL;
   if (dinfo->index_size > 0) {
      if (dinfo->has_user_indices) {
         if (!util_upload_index_buffer(pctx, dinfo, &index_buffer, &index_offset)) {
            debug_printf("util_upload_index_buffer() failed\n");
            return;
         }
      } else
         index_buffer = dinfo->index.resource;
   }

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   begin_render_pass(cmdbuf, ctx->render_pass, ctx->framebuffer,
                     ctx->fb_state.width, ctx->fb_state.height);

   /* dynamic state */
   vkCmdSetViewport(cmdbuf->cmdbuf, 0, ctx->num_viewports, ctx->viewports);

   /* no scissor bound: fall back to a scissor covering the framebuffer */
   if (ctx->num_scissors)
      vkCmdSetScissor(cmdbuf->cmdbuf, 0, ctx->num_scissors, ctx->scissors);
   else if (ctx->fb_state.width && ctx->fb_state.height) {
      VkRect2D fb_scissor = {};
      fb_scissor.extent.width = ctx->fb_state.width;
      fb_scissor.extent.height = ctx->fb_state.height;
      vkCmdSetScissor(cmdbuf->cmdbuf, 0, 1, &fb_scissor);
   }

   vkCmdSetStencilReference(cmdbuf->cmdbuf, VK_STENCIL_FACE_FRONT_BIT, ctx->stencil_ref[0]);
   vkCmdSetStencilReference(cmdbuf->cmdbuf, VK_STENCIL_FACE_BACK_BIT, ctx->stencil_ref[1]);

   if (depth_bias)
      vkCmdSetDepthBias(cmdbuf->cmdbuf, rast_state->offset_units, rast_state->offset_clamp, rast_state->offset_scale);
   else
      vkCmdSetDepthBias(cmdbuf->cmdbuf, 0.0f, 0.0f, 0.0f);

   if (ctx->gfx_pipeline_state.blend_state->need_blend_constants)
      vkCmdSetBlendConstants(cmdbuf->cmdbuf, ctx->blend_constants);

   /* descriptor set: one write per UBO/sampler binding across all stages */
   VkDescriptorSet desc_set = allocate_descriptor_set(ctx, gfx_program->dsl);

   VkWriteDescriptorSet wds[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS + PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   VkDescriptorBufferInfo buffer_infos[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS];
   VkDescriptorImageInfo image_infos[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   int num_wds = 0, num_buffer_info = 0, num_image_info = 0;

   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            assert(ctx->ubos[i][index].buffer_size > 0);
            assert(ctx->ubos[i][index].buffer);
            buffer_infos[num_buffer_info].buffer = zink_resource(ctx->ubos[i][index].buffer)->buffer;
            buffer_infos[num_buffer_info].offset = ctx->ubos[i][index].buffer_offset;
            buffer_infos[num_buffer_info].range  = VK_WHOLE_SIZE;
            wds[num_wds].pBufferInfo = buffer_infos + num_buffer_info;
            ++num_buffer_info;
         } else {
            struct pipe_sampler_view *psampler_view = ctx->image_views[i][index];
            assert(psampler_view);
            struct zink_sampler_view *sampler_view = (struct zink_sampler_view *)psampler_view;
            struct zink_resource *resource = zink_resource(psampler_view->texture);
            image_infos[num_image_info].imageLayout = resource->layout;
            image_infos[num_image_info].imageView = sampler_view->image_view;
            image_infos[num_image_info].sampler = ctx->samplers[i][index];
            wds[num_wds].pImageInfo = image_infos + num_image_info;
            ++num_image_info;
         }

         wds[num_wds].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
         wds[num_wds].pNext = NULL;
         wds[num_wds].dstSet = desc_set;
         wds[num_wds].dstBinding = shader->bindings[j].binding;
         wds[num_wds].dstArrayElement = 0;
         wds[num_wds].descriptorCount = 1;
         wds[num_wds].descriptorType = shader->bindings[j].type;
         ++num_wds;
      }
   }

   vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);

   vkCmdBindPipeline(cmdbuf->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
   vkCmdBindDescriptorSets(cmdbuf->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS,
                           gfx_program->layout, 0, 1, &desc_set, 0, NULL);
   zink_bind_vertex_buffers(cmdbuf->cmdbuf, ctx);

   if (dinfo->index_size > 0) {
      /* 8-bit indices are not supported; primconvert/state tracker must
       * have widened them by now */
      assert(dinfo->index_size != 1);
      VkIndexType index_type = dinfo->index_size == 2 ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
      vkCmdBindIndexBuffer(cmdbuf->cmdbuf, zink_resource(index_buffer)->buffer, index_offset, index_type);
      vkCmdDrawIndexed(cmdbuf->cmdbuf,
                       dinfo->count, dinfo->instance_count,
                       dinfo->start, dinfo->index_bias, dinfo->start_instance);
   } else
      vkCmdDraw(cmdbuf->cmdbuf, dinfo->count, dinfo->instance_count, dinfo->start, dinfo->start_instance);

   vkCmdEndRenderPass(cmdbuf->cmdbuf);

   zink_end_cmdbuf(ctx, cmdbuf);

   vkDestroyPipeline(screen->dev, pipeline, NULL);

   /* release the temporary index upload, if one was made */
   if (dinfo->index_size > 0 && dinfo->has_user_indices)
      pipe_resource_reference(&index_buffer, NULL);
}
930
/* pipe_context::flush
 *
 * Currently a no-op — command buffers are submitted by zink_end_cmdbuf.
 * NOTE(review): `pfence` is never written, so callers that wait on the
 * returned fence would block or misbehave — confirm fence handling plan.
 */
static void
zink_flush(struct pipe_context *pctx,
           struct pipe_fence_handle **pfence,
           enum pipe_flush_flags flags)
{
}
937
938 static void
939 zink_blit(struct pipe_context *pctx,
940 const struct pipe_blit_info *info)
941 {
942 struct zink_context *ctx = zink_context(pctx);
943 bool is_resolve = false;
944 if (info->mask != PIPE_MASK_RGBA ||
945 info->scissor_enable ||
946 info->alpha_blend) {
947 if (!util_blitter_is_blit_supported(ctx->blitter, info)) {
948 debug_printf("blit unsupported %s -> %s\n",
949 util_format_short_name(info->src.resource->format),
950 util_format_short_name(info->dst.resource->format));
951 return;
952 }
953
954 util_blitter_save_fragment_constant_buffer_slot(ctx->blitter, ctx->ubos[PIPE_SHADER_FRAGMENT]);
955 util_blitter_save_vertex_buffer_slot(ctx->blitter, ctx->buffers);
956 util_blitter_save_vertex_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_VERTEX]);
957 util_blitter_save_fragment_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_FRAGMENT]);
958 util_blitter_save_rasterizer(ctx->blitter, ctx->gfx_pipeline_state.rast_state);
959
960 util_blitter_blit(ctx->blitter, info);
961 }
962
963 struct zink_resource *src = zink_resource(info->src.resource);
964 struct zink_resource *dst = zink_resource(info->dst.resource);
965
966 if (src->base.nr_samples > 1 && dst->base.nr_samples <= 1)
967 is_resolve = true;
968
969 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
970 if (!cmdbuf)
971 return;
972
973 if (is_resolve) {
974 VkImageResolve region = {};
975
976 region.srcSubresource.aspectMask = src->aspect;
977 region.srcSubresource.mipLevel = info->src.level;
978 region.srcSubresource.baseArrayLayer = 0; // no clue
979 region.srcSubresource.layerCount = 1; // no clue
980 region.srcOffset.x = info->src.box.x;
981 region.srcOffset.y = info->src.box.y;
982 region.srcOffset.z = info->src.box.z;
983
984 region.dstSubresource.aspectMask = dst->aspect;
985 region.dstSubresource.mipLevel = info->dst.level;
986 region.dstSubresource.baseArrayLayer = 0; // no clue
987 region.dstSubresource.layerCount = 1; // no clue
988 region.dstOffset.x = info->dst.box.x;
989 region.dstOffset.y = info->dst.box.y;
990 region.dstOffset.z = info->dst.box.z;
991
992 region.extent.width = info->dst.box.width;
993 region.extent.height = info->dst.box.height;
994 region.extent.depth = info->dst.box.depth;
995 vkCmdResolveImage(cmdbuf->cmdbuf, src->image, src->layout,
996 dst->image, dst->layout,
997 1, &region);
998
999 } else {
1000 if (dst->layout != VK_IMAGE_LAYOUT_GENERAL &&
1001 dst->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
1002 zink_resource_barrier(cmdbuf->cmdbuf, dst, dst->aspect,
1003 VK_IMAGE_LAYOUT_GENERAL);
1004
1005 VkImageBlit region = {};
1006 region.srcSubresource.aspectMask = src->aspect;
1007 region.srcSubresource.mipLevel = info->src.level;
1008 region.srcOffsets[0].x = info->src.box.x;
1009 region.srcOffsets[0].y = info->src.box.y;
1010 region.srcOffsets[1].x = info->src.box.x + info->src.box.width;
1011 region.srcOffsets[1].y = info->src.box.y + info->src.box.height;
1012
1013 if (src->base.array_size > 1) {
1014 region.srcOffsets[0].z = 0;
1015 region.srcOffsets[1].z = 1;
1016 region.srcSubresource.baseArrayLayer = info->src.box.z;
1017 region.srcSubresource.layerCount = info->src.box.depth;
1018 } else {
1019 region.srcOffsets[0].z = info->src.box.z;
1020 region.srcOffsets[1].z = info->src.box.z + info->src.box.depth;
1021 region.srcSubresource.baseArrayLayer = 0;
1022 region.srcSubresource.layerCount = 1;
1023 }
1024
1025 region.dstSubresource.aspectMask = dst->aspect;
1026 region.dstSubresource.mipLevel = info->dst.level;
1027 region.dstOffsets[0].x = info->dst.box.x;
1028 region.dstOffsets[0].y = info->dst.box.y;
1029 region.dstOffsets[1].x = info->dst.box.x + info->dst.box.width;
1030 region.dstOffsets[1].y = info->dst.box.y + info->dst.box.height;
1031
1032 if (dst->base.array_size > 1) {
1033 region.dstOffsets[0].z = 0;
1034 region.dstOffsets[1].z = 1;
1035 region.dstSubresource.baseArrayLayer = info->dst.box.z;
1036 region.dstSubresource.layerCount = info->dst.box.depth;
1037 } else {
1038 region.dstOffsets[0].z = info->dst.box.z;
1039 region.dstOffsets[1].z = info->dst.box.z + info->dst.box.depth;
1040 region.dstSubresource.baseArrayLayer = 0;
1041 region.dstSubresource.layerCount = 1;
1042 }
1043
1044 vkCmdBlitImage(cmdbuf->cmdbuf, src->image, src->layout,
1045 dst->image, dst->layout,
1046 1, &region,
1047 filter(info->filter));
1048 }
1049 zink_end_cmdbuf(ctx, cmdbuf);
1050 }
1051
/* pipe_context::flush_resource
 *
 * Not implemented yet; nothing is done with the resource.
 */
static void
zink_flush_resource(struct pipe_context *pipe,
                    struct pipe_resource *resource)
{
}
1057
1058 static void
1059 zink_resource_copy_region(struct pipe_context *pctx,
1060 struct pipe_resource *pdst,
1061 unsigned dst_level, unsigned dstx, unsigned dsty, unsigned dstz,
1062 struct pipe_resource *psrc,
1063 unsigned src_level, const struct pipe_box *src_box)
1064 {
1065 struct zink_resource *dst = zink_resource(pdst);
1066 struct zink_resource *src = zink_resource(psrc);
1067 struct zink_context *ctx = zink_context(pctx);
1068 if (dst->base.target != PIPE_BUFFER && src->base.target != PIPE_BUFFER) {
1069 VkImageCopy region = {};
1070
1071 region.srcSubresource.aspectMask = src->aspect;
1072 region.srcSubresource.mipLevel = src_level;
1073 region.srcSubresource.layerCount = 1;
1074 if (src->base.array_size > 1) {
1075 region.srcSubresource.baseArrayLayer = src_box->z;
1076 region.srcSubresource.layerCount = src_box->depth;
1077 region.extent.depth = 1;
1078 } else {
1079 region.srcOffset.z = src_box->z;
1080 region.srcSubresource.layerCount = 1;
1081 region.extent.depth = src_box->depth;
1082 }
1083
1084 region.srcOffset.x = src_box->x;
1085 region.srcOffset.y = src_box->y;
1086
1087 region.dstSubresource.aspectMask = dst->aspect;
1088 region.dstSubresource.mipLevel = dst_level;
1089 if (dst->base.array_size > 1) {
1090 region.dstSubresource.baseArrayLayer = dstz;
1091 region.dstSubresource.layerCount = src_box->depth;
1092 } else {
1093 region.dstOffset.z = dstz;
1094 region.dstSubresource.layerCount = 1;
1095 }
1096
1097 region.dstOffset.x = dstx;
1098 region.dstOffset.y = dsty;
1099 region.extent.width = src_box->width;
1100 region.extent.height = src_box->height;
1101
1102 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
1103 if (!cmdbuf)
1104 return;
1105
1106 vkCmdCopyImage(cmdbuf->cmdbuf, src->image, src->layout,
1107 dst->image, dst->layout,
1108 1, &region);
1109 zink_end_cmdbuf(ctx, cmdbuf);
1110 } else
1111 debug_printf("zink: TODO resource copy\n");
1112 }
1113
1114 struct pipe_context *
1115 zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
1116 {
1117 struct zink_screen *screen = zink_screen(pscreen);
1118 struct zink_context *ctx = CALLOC_STRUCT(zink_context);
1119
1120 ctx->base.screen = pscreen;
1121 ctx->base.priv = priv;
1122
1123 ctx->base.destroy = zink_context_destroy;
1124
1125 zink_context_state_init(&ctx->base);
1126
1127 ctx->base.create_sampler_state = zink_create_sampler_state;
1128 ctx->base.bind_sampler_states = zink_bind_sampler_states;
1129 ctx->base.delete_sampler_state = zink_delete_sampler_state;
1130
1131 ctx->base.create_sampler_view = zink_create_sampler_view;
1132 ctx->base.set_sampler_views = zink_set_sampler_views;
1133 ctx->base.sampler_view_destroy = zink_destroy_sampler_view;
1134
1135 ctx->base.create_vs_state = zink_create_vs_state;
1136 ctx->base.bind_vs_state = zink_bind_vs_state;
1137 ctx->base.delete_vs_state = zink_delete_vs_state;
1138
1139 ctx->base.create_fs_state = zink_create_fs_state;
1140 ctx->base.bind_fs_state = zink_bind_fs_state;
1141 ctx->base.delete_fs_state = zink_delete_fs_state;
1142
1143 ctx->base.set_polygon_stipple = zink_set_polygon_stipple;
1144 ctx->base.set_vertex_buffers = zink_set_vertex_buffers;
1145 ctx->base.set_viewport_states = zink_set_viewport_states;
1146 ctx->base.set_scissor_states = zink_set_scissor_states;
1147 ctx->base.set_constant_buffer = zink_set_constant_buffer;
1148 ctx->base.set_framebuffer_state = zink_set_framebuffer_state;
1149 ctx->base.set_stencil_ref = zink_set_stencil_ref;
1150 ctx->base.set_clip_state = zink_set_clip_state;
1151 ctx->base.set_active_query_state = zink_set_active_query_state;
1152 ctx->base.set_blend_color = zink_set_blend_color;
1153
1154 ctx->base.clear = zink_clear;
1155 ctx->base.draw_vbo = zink_draw_vbo;
1156 ctx->base.flush = zink_flush;
1157
1158 ctx->base.resource_copy_region = zink_resource_copy_region;
1159 ctx->base.blit = zink_blit;
1160
1161 ctx->base.flush_resource = zink_flush_resource;
1162 zink_context_surface_init(&ctx->base);
1163 zink_context_resource_init(&ctx->base);
1164 zink_context_query_init(&ctx->base);
1165
1166 slab_create_child(&ctx->transfer_pool, &screen->transfer_pool);
1167
1168 ctx->base.stream_uploader = u_upload_create_default(&ctx->base);
1169 ctx->base.const_uploader = ctx->base.stream_uploader;
1170
1171 int prim_hwsupport = 1 << PIPE_PRIM_POINTS |
1172 1 << PIPE_PRIM_LINES |
1173 1 << PIPE_PRIM_LINE_STRIP |
1174 1 << PIPE_PRIM_TRIANGLES |
1175 1 << PIPE_PRIM_TRIANGLE_STRIP |
1176 1 << PIPE_PRIM_TRIANGLE_FAN;
1177
1178 ctx->primconvert = util_primconvert_create(&ctx->base, prim_hwsupport);
1179 if (!ctx->primconvert)
1180 goto fail;
1181
1182 ctx->blitter = util_blitter_create(&ctx->base);
1183 if (!ctx->blitter)
1184 goto fail;
1185
1186 VkCommandPoolCreateInfo cpci = {};
1187 cpci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
1188 cpci.queueFamilyIndex = screen->gfx_queue;
1189 cpci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
1190 if (vkCreateCommandPool(screen->dev, &cpci, NULL, &ctx->cmdpool) != VK_SUCCESS)
1191 goto fail;
1192
1193 VkCommandBufferAllocateInfo cbai = {};
1194 cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1195 cbai.commandPool = ctx->cmdpool;
1196 cbai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1197 cbai.commandBufferCount = 1;
1198 if (vkAllocateCommandBuffers(screen->dev, &cbai, &ctx->cmdbuf.cmdbuf) != VK_SUCCESS)
1199 goto fail;
1200
1201 VkDescriptorPoolSize sizes[] = {
1202 {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1000}
1203 };
1204 VkDescriptorPoolCreateInfo dpci = {};
1205 dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
1206 dpci.pPoolSizes = sizes;
1207 dpci.poolSizeCount = ARRAY_SIZE(sizes);
1208 dpci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
1209 dpci.maxSets = 1000;
1210
1211 if(vkCreateDescriptorPool(screen->dev, &dpci, 0, &ctx->descpool) != VK_SUCCESS)
1212 goto fail;
1213
1214 vkGetDeviceQueue(screen->dev, screen->gfx_queue, 0, &ctx->queue);
1215
1216 return &ctx->base;
1217
1218 fail:
1219 if (ctx) {
1220 vkDestroyCommandPool(screen->dev, ctx->cmdpool, NULL);
1221 FREE(ctx);
1222 }
1223 return NULL;
1224 }