zink: add dri loader
[mesa.git] / src / gallium / drivers / zink / zink_context.c
1 /*
2 * Copyright 2018 Collabora Ltd.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * on the rights to use, copy, modify, merge, publish, distribute, sub
8 * license, and/or sell copies of the Software, and to permit persons to whom
9 * the Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
19 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
20 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
21 * USE OR OTHER DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "zink_context.h"
25
26 #include "zink_cmdbuf.h"
27 #include "zink_compiler.h"
28 #include "zink_framebuffer.h"
29 #include "zink_pipeline.h"
30 #include "zink_program.h"
31 #include "zink_render_pass.h"
32 #include "zink_resource.h"
33 #include "zink_screen.h"
34 #include "zink_state.h"
35 #include "zink_surface.h"
36
37 #include "indices/u_primconvert.h"
38 #include "util/u_blitter.h"
39 #include "util/u_debug.h"
40 #include "util/u_format.h"
41 #include "util/u_framebuffer.h"
42 #include "util/u_helpers.h"
43 #include "util/u_inlines.h"
44
45 #include "nir.h"
46
47 #include "util/u_memory.h"
48 #include "util/u_prim.h"
49 #include "util/u_upload_mgr.h"
50
static void
zink_context_destroy(struct pipe_context *pctx)
{
   /* Tear down the context: Vulkan command state first, then the shared
    * gallium helpers, then the context allocation itself.  Destruction
    * order matters; the helpers may still reference the context. */
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   /* the command buffer must be returned to its pool before the pool dies */
   vkFreeCommandBuffers(screen->dev, ctx->cmdpool, 1, &ctx->cmdbuf.cmdbuf);
   vkDestroyCommandPool(screen->dev, ctx->cmdpool, NULL);

   util_primconvert_destroy(ctx->primconvert);
   u_upload_destroy(pctx->stream_uploader);
   slab_destroy_child(&ctx->transfer_pool);
   util_blitter_destroy(ctx->blitter);
   FREE(ctx);
}
65
66 static VkFilter
67 filter(enum pipe_tex_filter filter)
68 {
69 switch (filter) {
70 case PIPE_TEX_FILTER_NEAREST: return VK_FILTER_NEAREST;
71 case PIPE_TEX_FILTER_LINEAR: return VK_FILTER_LINEAR;
72 }
73 unreachable("unexpected filter");
74 }
75
76 static VkSamplerMipmapMode
77 sampler_mipmap_mode(enum pipe_tex_mipfilter filter)
78 {
79 switch (filter) {
80 case PIPE_TEX_MIPFILTER_NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
81 case PIPE_TEX_MIPFILTER_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
82 case PIPE_TEX_MIPFILTER_NONE:
83 unreachable("PIPE_TEX_MIPFILTER_NONE should be dealt with earlier");
84 }
85 unreachable("unexpected filter");
86 }
87
88 static VkSamplerAddressMode
89 sampler_address_mode(enum pipe_tex_wrap filter)
90 {
91 switch (filter) {
92 case PIPE_TEX_WRAP_REPEAT: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
93 case PIPE_TEX_WRAP_CLAMP: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
94 case PIPE_TEX_WRAP_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
95 case PIPE_TEX_WRAP_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
96 case PIPE_TEX_WRAP_MIRROR_REPEAT: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
97 case PIPE_TEX_WRAP_MIRROR_CLAMP: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
98 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
99 case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; /* not technically correct, but kinda works */
100 }
101 unreachable("unexpected wrap");
102 }
103
static void *
zink_create_sampler_state(struct pipe_context *pctx,
                          const struct pipe_sampler_state *state)
{
   /* Translate a gallium sampler CSO into a VkSampler.  The raw handle is
    * returned as the opaque CSO pointer (see zink_bind_sampler_states /
    * zink_delete_sampler_state, which cast it back).
    * NOTE(review): returning a VkSampler as void * assumes the handle is
    * pointer-sized; non-dispatchable handles are 64-bit, so this only
    * works on 64-bit builds — confirm. */
   struct zink_screen *screen = zink_screen(pctx->screen);

   VkSamplerCreateInfo sci = {};
   sci.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
   sci.magFilter = filter(state->mag_img_filter);
   sci.minFilter = filter(state->min_img_filter);

   if (state->min_mip_filter != PIPE_TEX_MIPFILTER_NONE) {
      sci.mipmapMode = sampler_mipmap_mode(state->min_mip_filter);
      sci.minLod = state->min_lod;
      sci.maxLod = state->max_lod;
   } else {
      /* no mip filtering: restrict sampling to the base level */
      sci.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
      sci.minLod = 0;
      sci.maxLod = 0;
   }

   sci.addressModeU = sampler_address_mode(state->wrap_s);
   sci.addressModeV = sampler_address_mode(state->wrap_t);
   sci.addressModeW = sampler_address_mode(state->wrap_r);
   sci.mipLodBias = state->lod_bias;
   sci.compareOp = VK_COMPARE_OP_NEVER; // TODO
   sci.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // TODO

   if (state->max_anisotropy > 1) {
      sci.maxAnisotropy = state->max_anisotropy;
      sci.anisotropyEnable = VK_TRUE;
   }

   VkSampler sampler;
   VkResult err = vkCreateSampler(screen->dev, &sci, NULL, &sampler);
   if (err != VK_SUCCESS)
      return NULL;

   return sampler;
}
144
145 static void
146 zink_bind_sampler_states(struct pipe_context *pctx,
147 enum pipe_shader_type shader,
148 unsigned start_slot,
149 unsigned num_samplers,
150 void **samplers)
151 {
152 struct zink_context *ctx = zink_context(pctx);
153 for (unsigned i = 0; i < num_samplers; ++i)
154 ctx->samplers[shader][start_slot + i] = (VkSampler)samplers[i];
155 }
156
157 static void
158 zink_delete_sampler_state(struct pipe_context *pctx,
159 void *sampler_state)
160 {
161 struct zink_screen *screen = zink_screen(pctx->screen);
162 vkDestroySampler(screen->dev, sampler_state, NULL);
163 }
164
165
166 static VkImageViewType
167 image_view_type(enum pipe_texture_target target)
168 {
169 switch (target) {
170 case PIPE_TEXTURE_1D: return VK_IMAGE_VIEW_TYPE_1D;
171 case PIPE_TEXTURE_1D_ARRAY: return VK_IMAGE_VIEW_TYPE_1D_ARRAY;
172 case PIPE_TEXTURE_2D: return VK_IMAGE_VIEW_TYPE_2D;
173 case PIPE_TEXTURE_2D_ARRAY: return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
174 case PIPE_TEXTURE_CUBE: return VK_IMAGE_VIEW_TYPE_CUBE;
175 case PIPE_TEXTURE_CUBE_ARRAY: return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
176 case PIPE_TEXTURE_3D: return VK_IMAGE_VIEW_TYPE_3D;
177 case PIPE_TEXTURE_RECT: return VK_IMAGE_VIEW_TYPE_2D; /* not sure */
178 default:
179 unreachable("unexpected target");
180 }
181 }
182
183 static VkComponentSwizzle
184 component_mapping(enum pipe_swizzle swizzle)
185 {
186 switch (swizzle) {
187 case PIPE_SWIZZLE_X: return VK_COMPONENT_SWIZZLE_R;
188 case PIPE_SWIZZLE_Y: return VK_COMPONENT_SWIZZLE_G;
189 case PIPE_SWIZZLE_Z: return VK_COMPONENT_SWIZZLE_B;
190 case PIPE_SWIZZLE_W: return VK_COMPONENT_SWIZZLE_A;
191 case PIPE_SWIZZLE_0: return VK_COMPONENT_SWIZZLE_ZERO;
192 case PIPE_SWIZZLE_1: return VK_COMPONENT_SWIZZLE_ONE;
193 case PIPE_SWIZZLE_NONE: return VK_COMPONENT_SWIZZLE_IDENTITY; // ???
194 default:
195 unreachable("unexpected swizzle");
196 }
197 }
198
199 static struct pipe_sampler_view *
200 zink_create_sampler_view(struct pipe_context *pctx, struct pipe_resource *pres,
201 const struct pipe_sampler_view *state)
202 {
203 struct zink_screen *screen = zink_screen(pctx->screen);
204 struct zink_resource *res = zink_resource(pres);
205 struct zink_sampler_view *sampler_view = CALLOC_STRUCT(zink_sampler_view);
206
207 sampler_view->base = *state;
208 sampler_view->base.texture = NULL;
209 pipe_resource_reference(&sampler_view->base.texture, pres);
210 sampler_view->base.reference.count = 1;
211 sampler_view->base.context = pctx;
212
213 VkImageViewCreateInfo ivci = {};
214 ivci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
215 ivci.image = res->image;
216 ivci.viewType = image_view_type(state->target);
217 ivci.format = zink_get_format(state->format);
218 ivci.components.r = component_mapping(state->swizzle_r);
219 ivci.components.g = component_mapping(state->swizzle_g);
220 ivci.components.b = component_mapping(state->swizzle_b);
221 ivci.components.a = component_mapping(state->swizzle_a);
222 ivci.subresourceRange.aspectMask = zink_aspect_from_format(state->format);
223 ivci.subresourceRange.baseMipLevel = state->u.tex.first_level;
224 ivci.subresourceRange.baseArrayLayer = state->u.tex.first_layer;
225 ivci.subresourceRange.levelCount = state->u.tex.last_level - state->u.tex.first_level + 1;
226 ivci.subresourceRange.layerCount = state->u.tex.last_layer - state->u.tex.first_layer + 1;
227
228 VkResult err = vkCreateImageView(screen->dev, &ivci, NULL, &sampler_view->image_view);
229 if (err != VK_SUCCESS) {
230 FREE(sampler_view);
231 return NULL;
232 }
233
234 return &sampler_view->base;
235 }
236
237 static void
238 zink_destroy_sampler_view(struct pipe_context *pctx,
239 struct pipe_sampler_view *view)
240 {
241 FREE(view);
242 }
243
244 static void *
245 zink_create_vs_state(struct pipe_context *pctx,
246 const struct pipe_shader_state *shader)
247 {
248 struct nir_shader *nir;
249 if (shader->type != PIPE_SHADER_IR_NIR)
250 nir = zink_tgsi_to_nir(pctx->screen, shader->tokens);
251 else
252 nir = (struct nir_shader *)shader->ir.nir;
253
254 return zink_compile_nir(zink_screen(pctx->screen), nir);
255 }
256
257 static void
258 zink_bind_vs_state(struct pipe_context *pctx,
259 void *cso)
260 {
261 struct zink_context *ctx = zink_context(pctx);
262 ctx->gfx_stages[PIPE_SHADER_VERTEX] = cso;
263 }
264
265 static void
266 zink_delete_vs_state(struct pipe_context *pctx,
267 void *cso)
268 {
269 zink_shader_free(zink_screen(pctx->screen), cso);
270 }
271
272 static void *
273 zink_create_fs_state(struct pipe_context *pctx,
274 const struct pipe_shader_state *shader)
275 {
276 struct nir_shader *nir;
277 if (shader->type != PIPE_SHADER_IR_NIR)
278 nir = zink_tgsi_to_nir(pctx->screen, shader->tokens);
279 else
280 nir = (struct nir_shader *)shader->ir.nir;
281
282 return zink_compile_nir(zink_screen(pctx->screen), nir);
283 }
284
285 static void
286 zink_bind_fs_state(struct pipe_context *pctx,
287 void *cso)
288 {
289 struct zink_context *ctx = zink_context(pctx);
290 ctx->gfx_stages[PIPE_SHADER_FRAGMENT] = cso;
291 }
292
293 static void
294 zink_delete_fs_state(struct pipe_context *pctx,
295 void *cso)
296 {
297 zink_shader_free(zink_screen(pctx->screen), cso);
298 }
299
static void
zink_set_polygon_stipple(struct pipe_context *pctx,
                         const struct pipe_poly_stipple *ps)
{
   /* No-op: polygon stipple is not implemented (Vulkan has no
    * fixed-function equivalent). */
}
305
306 static void
307 zink_set_vertex_buffers(struct pipe_context *pctx,
308 unsigned start_slot,
309 unsigned num_buffers,
310 const struct pipe_vertex_buffer *buffers)
311 {
312 struct zink_context *ctx = zink_context(pctx);
313
314 if (buffers) {
315 for (int i = 0; i < num_buffers; ++i) {
316 const struct pipe_vertex_buffer *vb = buffers + i;
317 ctx->gfx_pipeline_state.bindings[start_slot + i].stride = vb->stride;
318 }
319 }
320
321 util_set_vertex_buffers_mask(ctx->buffers, &ctx->buffers_enabled_mask,
322 buffers, start_slot, num_buffers);
323 }
324
325 static void
326 zink_set_viewport_states(struct pipe_context *pctx,
327 unsigned start_slot,
328 unsigned num_viewports,
329 const struct pipe_viewport_state *state)
330 {
331 struct zink_context *ctx = zink_context(pctx);
332
333 for (unsigned i = 0; i < num_viewports; ++i) {
334 VkViewport viewport = {
335 state[i].translate[0] - state[i].scale[0],
336 state[i].translate[1] - state[i].scale[1],
337 state[i].scale[0] * 2,
338 state[i].scale[1] * 2,
339 state[i].translate[2] - state[i].scale[2],
340 state[i].translate[2] + state[i].scale[2]
341 };
342 ctx->viewports[start_slot + i] = viewport;
343 }
344 ctx->num_viewports = start_slot + num_viewports;
345 }
346
347 static void
348 zink_set_scissor_states(struct pipe_context *pctx,
349 unsigned start_slot, unsigned num_scissors,
350 const struct pipe_scissor_state *states)
351 {
352 struct zink_context *ctx = zink_context(pctx);
353
354 for (unsigned i = 0; i < num_scissors; i++) {
355 VkRect2D scissor;
356
357 scissor.offset.x = states[i].minx;
358 scissor.offset.y = states[i].miny;
359 scissor.extent.width = states[i].maxx - states[i].minx;
360 scissor.extent.height = states[i].maxy - states[i].miny;
361 ctx->scissors[start_slot + i] = scissor;
362 }
363 ctx->num_scissors = start_slot + num_scissors;
364 }
365
static void
zink_set_constant_buffer(struct pipe_context *pctx,
                         enum pipe_shader_type shader, uint index,
                         const struct pipe_constant_buffer *cb)
{
   /* Bind (cb != NULL) or unbind (cb == NULL) a constant buffer for a
    * shader stage.  User-memory buffers are first copied into GPU memory
    * through the context's const_uploader. */
   struct zink_context *ctx = zink_context(pctx);

   if (cb) {
      struct pipe_resource *buffer = cb->buffer;
      unsigned offset = cb->buffer_offset;
      if (cb->user_buffer)
         /* on success, 'buffer' holds a fresh reference to the upload BO
          * and 'offset' points at the copied data */
         u_upload_data(ctx->base.const_uploader, 0, cb->buffer_size, 64,
                       cb->user_buffer, &offset, &buffer);

      pipe_resource_reference(&ctx->ubos[shader][index].buffer, buffer);
      ctx->ubos[shader][index].buffer_offset = offset;
      ctx->ubos[shader][index].buffer_size = cb->buffer_size;
      ctx->ubos[shader][index].user_buffer = NULL;

      if (cb->user_buffer)
         /* drop the extra reference returned by u_upload_data; the ubo
          * slot above now owns its own reference */
         pipe_resource_reference(&buffer, NULL);
   } else {
      pipe_resource_reference(&ctx->ubos[shader][index].buffer, NULL);
      ctx->ubos[shader][index].buffer_offset = 0;
      ctx->ubos[shader][index].buffer_size = 0;
      ctx->ubos[shader][index].user_buffer = NULL;
   }
}
394
395 static void
396 zink_set_sampler_views(struct pipe_context *pctx,
397 enum pipe_shader_type shader_type,
398 unsigned start_slot,
399 unsigned num_views,
400 struct pipe_sampler_view **views)
401 {
402 struct zink_context *ctx = zink_context(pctx);
403 assert(views);
404 for (unsigned i = 0; i < num_views; ++i) {
405 pipe_sampler_view_reference(
406 &ctx->image_views[shader_type][start_slot + i],
407 views[i]);
408 }
409 }
410
411 static void
412 zink_set_stencil_ref(struct pipe_context *pctx,
413 const struct pipe_stencil_ref *ref)
414 {
415 struct zink_context *ctx = zink_context(pctx);
416 ctx->stencil_ref[0] = ref->ref_value[0];
417 ctx->stencil_ref[1] = ref->ref_value[1];
418 }
419
static void
zink_set_clip_state(struct pipe_context *pctx,
                    const struct pipe_clip_state *pcs)
{
   /* No-op: user clip planes are not handled yet. */
}
425
426 static struct zink_render_pass *
427 get_render_pass(struct zink_screen *screen,
428 const struct pipe_framebuffer_state *fb)
429 {
430 struct zink_render_pass_state state;
431
432 for (int i = 0; i < fb->nr_cbufs; i++) {
433 struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);
434 state.rts[i].format = cbuf->format;
435 }
436 state.num_cbufs = fb->nr_cbufs;
437
438 if (fb->zsbuf) {
439 struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
440 state.rts[fb->nr_cbufs].format = zsbuf->format;
441 }
442 state.have_zsbuf = fb->zsbuf != NULL;
443
444 return zink_create_render_pass(screen, &state);
445 }
446
static void
zink_set_framebuffer_state(struct pipe_context *pctx,
                           const struct pipe_framebuffer_state *state)
{
   /* Adopt a new framebuffer: build/look up a compatible render-pass and
    * framebuffer object, then transition every attachment into a usable
    * layout. */
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);

   struct zink_render_pass *rp = get_render_pass(screen, state);
   zink_render_pass_reference(screen, &ctx->render_pass, rp);

   struct zink_framebuffer *fb = zink_create_framebuffer(screen, state, rp);
   zink_framebuffer_reference(screen, &ctx->framebuffer, fb);
   /* drop the local references; the context now holds its own */
   zink_framebuffer_reference(screen, &fb, NULL);
   zink_render_pass_reference(screen, &rp, NULL);

   ctx->gfx_pipeline_state.num_attachments = state->nr_cbufs;

   util_copy_framebuffer_state(&ctx->fb_state, state);

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   /* move each color attachment out of any layout that can't be rendered
    * to; GENERAL is used as the catch-all target */
   for (int i = 0; i < state->nr_cbufs; i++) {
      struct zink_resource *res = zink_resource(state->cbufs[i]->texture);
      if (res->layout != VK_IMAGE_LAYOUT_GENERAL &&
          res->layout != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   /* same for the depth/stencil attachment, if present */
   if (state->zsbuf) {
      struct zink_resource *res = zink_resource(state->zsbuf->texture);
      if (res->layout != VK_IMAGE_LAYOUT_GENERAL &&
          res->layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, res, res->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   zink_end_cmdbuf(ctx, cmdbuf);
}
488
static void
zink_set_active_query_state(struct pipe_context *pctx, bool enable)
{
   /* No-op: there are no queries to pause or resume yet. */
}
493
494 static void
495 zink_set_blend_color(struct pipe_context *pctx,
496 const struct pipe_blend_color *color)
497 {
498 struct zink_context *ctx = zink_context(pctx);
499 memcpy(ctx->blend_constants, color->color, sizeof(float) * 4);
500 }
501
502 static VkAccessFlags
503 access_flags(VkImageLayout layout)
504 {
505 switch (layout) {
506 case VK_IMAGE_LAYOUT_UNDEFINED:
507 case VK_IMAGE_LAYOUT_GENERAL:
508 return 0;
509
510 case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
511 return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
512 case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
513 return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
514
515 case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
516 return VK_ACCESS_SHADER_READ_BIT;
517
518 case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
519 return VK_ACCESS_TRANSFER_READ_BIT;
520
521 case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
522 return VK_ACCESS_TRANSFER_WRITE_BIT;
523
524 case VK_IMAGE_LAYOUT_PREINITIALIZED:
525 return VK_ACCESS_HOST_WRITE_BIT;
526
527 default:
528 unreachable("unexpected layout");
529 }
530 }
531
532 void
533 zink_resource_barrier(VkCommandBuffer cmdbuf, struct zink_resource *res,
534 VkImageAspectFlags aspect, VkImageLayout new_layout)
535 {
536 VkImageSubresourceRange isr = {
537 aspect,
538 0, VK_REMAINING_MIP_LEVELS,
539 0, VK_REMAINING_ARRAY_LAYERS
540 };
541
542 VkImageMemoryBarrier imb = {
543 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
544 NULL,
545 access_flags(res->layout),
546 access_flags(new_layout),
547 res->layout,
548 new_layout,
549 VK_QUEUE_FAMILY_IGNORED,
550 VK_QUEUE_FAMILY_IGNORED,
551 res->image,
552 isr
553 };
554 vkCmdPipelineBarrier(
555 cmdbuf,
556 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
557 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
558 0,
559 0, NULL,
560 0, NULL,
561 1, &imb
562 );
563
564 res->layout = new_layout;
565 }
566
static void
zink_clear(struct pipe_context *pctx,
           unsigned buffers,
           const union pipe_color_union *pcolor,
           double depth, unsigned stencil)
{
   /* Clear the currently-bound framebuffer outside of a render pass,
    * using vkCmdClearColorImage / vkCmdClearDepthStencilImage.
    * NOTE(review): only the float view of the clear color (pcolor->f) is
    * used below; integer-format clears would need ->ui / ->i — confirm. */
   struct zink_context *ctx = zink_context(pctx);
   struct pipe_framebuffer_state *fb = &ctx->fb_state;

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   // first transition all images to a compatible layout
   if (buffers & PIPE_CLEAR_COLOR) {
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
            continue;

         struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);

         if (cbuf->layout != VK_IMAGE_LAYOUT_GENERAL &&
             cbuf->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
            zink_resource_barrier(cmdbuf->cmdbuf, cbuf, cbuf->aspect,
                                  VK_IMAGE_LAYOUT_GENERAL);
      }
   }

   /* collect the requested depth/stencil aspects; a non-zero mask also
    * records that a depth/stencil clear is needed further down */
   VkImageAspectFlags depthStencilAspect = 0;
   if (buffers & PIPE_CLEAR_DEPTHSTENCIL && fb->zsbuf) {
      struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);
      if (buffers & PIPE_CLEAR_DEPTH)
         depthStencilAspect |= VK_IMAGE_ASPECT_DEPTH_BIT;
      if (buffers & PIPE_CLEAR_STENCIL)
         depthStencilAspect |= VK_IMAGE_ASPECT_STENCIL_BIT;

      if (zsbuf->layout != VK_IMAGE_LAYOUT_GENERAL &&
          zsbuf->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
         zink_resource_barrier(cmdbuf->cmdbuf, zsbuf, depthStencilAspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   VkClearColorValue color;
   color.float32[0] = pcolor->f[0];
   color.float32[1] = pcolor->f[1];
   color.float32[2] = pcolor->f[2];
   color.float32[3] = pcolor->f[3];

   /* now perform the actual clears on every selected color buffer */
   if (buffers & PIPE_CLEAR_COLOR) {
      for (unsigned i = 0; i < fb->nr_cbufs; i++) {
         if (!(buffers & (PIPE_CLEAR_COLOR0 << i)) || !fb->cbufs[i])
            continue;

         struct zink_resource *cbuf = zink_resource(fb->cbufs[i]->texture);

         VkImageSubresourceRange range;
         range.aspectMask = cbuf->aspect;
         range.baseMipLevel = 0;
         range.levelCount = VK_REMAINING_MIP_LEVELS;
         range.baseArrayLayer = 0;
         range.layerCount = VK_REMAINING_ARRAY_LAYERS;
         vkCmdClearColorImage(cmdbuf->cmdbuf,
                              cbuf->image, VK_IMAGE_LAYOUT_GENERAL,
                              &color,
                              1, &range);
      }
   }

   if (depthStencilAspect) {
      struct zink_resource *zsbuf = zink_resource(fb->zsbuf->texture);

      VkClearDepthStencilValue zsvalue = { depth, stencil };

      VkImageSubresourceRange range;
      range.aspectMask = depthStencilAspect;
      range.baseMipLevel = 0;
      range.levelCount = VK_REMAINING_MIP_LEVELS;
      range.baseArrayLayer = 0;
      range.layerCount = VK_REMAINING_ARRAY_LAYERS;

      vkCmdClearDepthStencilImage(cmdbuf->cmdbuf,
                                  zsbuf->image, VK_IMAGE_LAYOUT_GENERAL,
                                  &zsvalue,
                                  1, &range);
   }

   zink_end_cmdbuf(ctx, cmdbuf);
}
655
656 VkShaderStageFlagBits
657 zink_shader_stage(enum pipe_shader_type type)
658 {
659 VkShaderStageFlagBits stages[] = {
660 [PIPE_SHADER_VERTEX] = VK_SHADER_STAGE_VERTEX_BIT,
661 [PIPE_SHADER_FRAGMENT] = VK_SHADER_STAGE_FRAGMENT_BIT,
662 [PIPE_SHADER_GEOMETRY] = VK_SHADER_STAGE_GEOMETRY_BIT,
663 [PIPE_SHADER_TESS_CTRL] = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
664 [PIPE_SHADER_TESS_EVAL] = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
665 [PIPE_SHADER_COMPUTE] = VK_SHADER_STAGE_COMPUTE_BIT,
666 };
667 return stages[type];
668 }
669
670 static VkDescriptorSet
671 allocate_descriptor_set(struct zink_context *ctx, VkDescriptorSetLayout dsl)
672 {
673 struct zink_screen *screen = zink_screen(ctx->base.screen);
674 VkDescriptorSetAllocateInfo dsai;
675 memset((void *)&dsai, 0, sizeof(dsai));
676 dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
677 dsai.pNext = NULL;
678 dsai.descriptorPool = ctx->descpool;
679 dsai.descriptorSetCount = 1;
680 dsai.pSetLayouts = &dsl;
681
682 VkDescriptorSet desc_set;
683 if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
684 if (vkResetDescriptorPool(screen->dev, ctx->descpool, 0) != VK_SUCCESS) {
685 fprintf(stderr, "vkResetDescriptorPool failed\n");
686 return VK_NULL_HANDLE;
687 }
688 if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
689 fprintf(stderr, "vkAllocateDescriptorSets failed\n");
690 return VK_NULL_HANDLE;
691 }
692 }
693
694 return desc_set;
695 }
696
697 static VkPrimitiveTopology
698 zink_primitive_topology(enum pipe_prim_type mode)
699 {
700 switch (mode) {
701 case PIPE_PRIM_POINTS:
702 return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
703
704 case PIPE_PRIM_LINES:
705 return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
706
707 case PIPE_PRIM_LINE_STRIP:
708 return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
709
710 case PIPE_PRIM_TRIANGLES:
711 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
712
713 case PIPE_PRIM_TRIANGLE_STRIP:
714 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
715
716 case PIPE_PRIM_TRIANGLE_FAN:
717 return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
718
719 default:
720 unreachable("unexpected enum pipe_prim_type");
721 }
722 }
723
724 static void
725 zink_bind_vertex_buffers(VkCommandBuffer cmdbuf, struct zink_context *ctx)
726 {
727 VkBuffer buffers[PIPE_MAX_ATTRIBS];
728 VkDeviceSize buffer_offsets[PIPE_MAX_ATTRIBS];
729 struct zink_vertex_elements_state *elems = ctx->gfx_pipeline_state.element_state;
730 for (unsigned i = 0; i < elems->num_bindings; i++) {
731 struct pipe_vertex_buffer *vb = ctx->buffers + elems->binding_map[i];
732 assert(vb && vb->buffer.resource);
733 struct zink_resource *res = zink_resource(vb->buffer.resource);
734 buffers[i] = res->buffer;
735 buffer_offsets[i] = vb->buffer_offset;
736 }
737
738 if (elems->num_bindings > 0)
739 vkCmdBindVertexBuffers(cmdbuf, 0, elems->num_bindings, buffers, buffer_offsets);
740 }
741
static void
zink_draw_vbo(struct pipe_context *pctx,
              const struct pipe_draw_info *dinfo)
{
   /* Emit one draw: translate primitives Vulkan can't draw natively via
    * u_primconvert, otherwise build program/pipeline/descriptor state and
    * record the render pass + draw into a command buffer. */
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_rasterizer_state *rast_state = ctx->gfx_pipeline_state.rast_state;

   /* quads and beyond, plus line loops, aren't Vulkan topologies; hand
    * them to u_primconvert, which re-enters this function */
   if (dinfo->mode >= PIPE_PRIM_QUADS ||
       dinfo->mode == PIPE_PRIM_LINE_LOOP) {
      if (!u_trim_pipe_prim(dinfo->mode, (unsigned *)&dinfo->count))
         return;

      util_primconvert_save_rasterizer_state(ctx->primconvert, &rast_state->base);
      util_primconvert_draw_vbo(ctx->primconvert, dinfo);
      return;
   }

   /* NOTE(review): a gfx_program is created on every draw and never
    * destroyed in this function — looks like a leak; confirm whether
    * zink_create_gfx_program caches/owns these elsewhere. */
   struct zink_gfx_program *gfx_program = zink_create_gfx_program(screen->dev,
                                                                  ctx->gfx_stages);
   if (!gfx_program)
      return;

   ctx->gfx_pipeline_state.primitive_topology = zink_primitive_topology(dinfo->mode);

   /* pipeline is likewise created per draw; it is destroyed at the end */
   VkPipeline pipeline = zink_create_gfx_pipeline(screen->dev,
                                                  gfx_program,
                                                  &ctx->gfx_pipeline_state,
                                                  ctx->render_pass->render_pass);

   /* depth bias is enabled per reduced primitive class, matching the
    * rasterizer state's offset_{point,line,tri} flags */
   bool depth_bias = false;
   switch (u_reduced_prim(dinfo->mode)) {
   case PIPE_PRIM_POINTS:
      depth_bias = rast_state->offset_point;
      break;

   case PIPE_PRIM_LINES:
      depth_bias = rast_state->offset_line;
      break;

   case PIPE_PRIM_TRIANGLES:
      depth_bias = rast_state->offset_tri;
      break;

   default:
      unreachable("unexpected reduced prim");
   }

   /* user index arrays are uploaded to a GPU buffer first;
    * NOTE(review): the early returns below (failed cmdbuf start) don't
    * release an uploaded index_buffer — possible leak on that path. */
   unsigned index_offset = 0;
   struct pipe_resource *index_buffer = NULL;
   if (dinfo->index_size > 0) {
      if (dinfo->has_user_indices) {
         if (!util_upload_index_buffer(pctx, dinfo, &index_buffer, &index_offset)) {
            debug_printf("util_upload_index_buffer() failed\n");
            return;
         }
      } else
         index_buffer = dinfo->index.resource;
   }

   struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
   if (!cmdbuf)
      return;

   /* render pass covers the whole framebuffer; no clears here */
   VkRenderPassBeginInfo rpbi = {};
   rpbi.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
   rpbi.renderPass = ctx->render_pass->render_pass;
   rpbi.renderArea.offset.x = 0;
   rpbi.renderArea.offset.y = 0;
   rpbi.renderArea.extent.width = ctx->fb_state.width;
   rpbi.renderArea.extent.height = ctx->fb_state.height;
   rpbi.clearValueCount = 0;
   rpbi.pClearValues = NULL;
   rpbi.framebuffer = ctx->framebuffer->fb;

   vkCmdBeginRenderPass(cmdbuf->cmdbuf, &rpbi, VK_SUBPASS_CONTENTS_INLINE);

   vkCmdSetViewport(cmdbuf->cmdbuf, 0, ctx->num_viewports, ctx->viewports);

   /* with no scissors set, fall back to a framebuffer-sized scissor */
   if (ctx->num_scissors)
      vkCmdSetScissor(cmdbuf->cmdbuf, 0, ctx->num_scissors, ctx->scissors);
   else if (ctx->fb_state.width && ctx->fb_state.height) {
      VkRect2D fb_scissor = {};
      fb_scissor.extent.width = ctx->fb_state.width;
      fb_scissor.extent.height = ctx->fb_state.height;
      vkCmdSetScissor(cmdbuf->cmdbuf, 0, 1, &fb_scissor);
   }

   vkCmdSetStencilReference(cmdbuf->cmdbuf, VK_STENCIL_FACE_FRONT_BIT, ctx->stencil_ref[0]);
   vkCmdSetStencilReference(cmdbuf->cmdbuf, VK_STENCIL_FACE_BACK_BIT, ctx->stencil_ref[1]);

   if (depth_bias)
      vkCmdSetDepthBias(cmdbuf->cmdbuf, rast_state->offset_units, rast_state->offset_clamp, rast_state->offset_scale);
   else
      vkCmdSetDepthBias(cmdbuf->cmdbuf, 0.0f, 0.0f, 0.0f);

   if (ctx->gfx_pipeline_state.blend_state->need_blend_constants)
      vkCmdSetBlendConstants(cmdbuf->cmdbuf, ctx->blend_constants);

   /* NOTE(review): allocate_descriptor_set can return VK_NULL_HANDLE;
    * that case isn't checked before the writes below — confirm. */
   VkDescriptorSet desc_set = allocate_descriptor_set(ctx, gfx_program->dsl);

   /* build one VkWriteDescriptorSet per shader binding: UBOs get a
    * buffer info, everything else is treated as a sampled image */
   VkWriteDescriptorSet wds[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS + PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   VkDescriptorBufferInfo buffer_infos[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS];
   VkDescriptorImageInfo image_infos[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   int num_wds = 0, num_buffer_info = 0, num_image_info = 0;

   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            assert(ctx->ubos[i][index].buffer_size > 0);
            assert(ctx->ubos[i][index].buffer);
            buffer_infos[num_buffer_info].buffer = zink_resource(ctx->ubos[i][index].buffer)->buffer;
            buffer_infos[num_buffer_info].offset = ctx->ubos[i][index].buffer_offset;
            buffer_infos[num_buffer_info].range = VK_WHOLE_SIZE;
            wds[num_wds].pBufferInfo = buffer_infos + num_buffer_info;
            ++num_buffer_info;
         } else {
            struct pipe_sampler_view *psampler_view = ctx->image_views[i][index];
            assert(psampler_view);
            struct zink_sampler_view *sampler_view = (struct zink_sampler_view *)psampler_view;
            struct zink_resource *resource = zink_resource(psampler_view->texture);
            image_infos[num_image_info].imageLayout = resource->layout;
            image_infos[num_image_info].imageView = sampler_view->image_view;
            image_infos[num_image_info].sampler = ctx->samplers[i][index];
            wds[num_wds].pImageInfo = image_infos + num_image_info;
            ++num_image_info;
         }

         /* fields common to both descriptor kinds */
         wds[num_wds].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
         wds[num_wds].pNext = NULL;
         wds[num_wds].dstSet = desc_set;
         wds[num_wds].dstBinding = shader->bindings[j].binding;
         wds[num_wds].dstArrayElement = 0;
         wds[num_wds].descriptorCount = 1;
         wds[num_wds].descriptorType = shader->bindings[j].type;
         ++num_wds;
      }
   }

   vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);

   vkCmdBindPipeline(cmdbuf->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
   vkCmdBindDescriptorSets(cmdbuf->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS,
                           gfx_program->layout, 0, 1, &desc_set, 0, NULL);
   zink_bind_vertex_buffers(cmdbuf->cmdbuf, ctx);

   if (dinfo->index_size > 0) {
      /* 8-bit indices are not supported here */
      assert(dinfo->index_size != 1);
      VkIndexType index_type = dinfo->index_size == 2 ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
      vkCmdBindIndexBuffer(cmdbuf->cmdbuf, zink_resource(index_buffer)->buffer, index_offset, index_type);
      vkCmdDrawIndexed(cmdbuf->cmdbuf,
                       dinfo->count, dinfo->instance_count,
                       dinfo->start, dinfo->index_bias, dinfo->start_instance);
   } else
      vkCmdDraw(cmdbuf->cmdbuf, dinfo->count, dinfo->instance_count, dinfo->start, dinfo->start_instance);

   vkCmdEndRenderPass(cmdbuf->cmdbuf);

   zink_end_cmdbuf(ctx, cmdbuf);

   vkDestroyPipeline(screen->dev, pipeline, NULL);

   /* release the temporary upload buffer for user index arrays */
   if (dinfo->index_size > 0 && dinfo->has_user_indices)
      pipe_resource_reference(&index_buffer, NULL);
}
912
913 static void
914 zink_flush(struct pipe_context *pctx,
915 struct pipe_fence_handle **pfence,
916 enum pipe_flush_flags flags)
917 {
918 }
919
920 static void
921 zink_blit(struct pipe_context *pctx,
922 const struct pipe_blit_info *info)
923 {
924 struct zink_context *ctx = zink_context(pctx);
925 bool is_resolve = false;
926 if (info->mask != PIPE_MASK_RGBA ||
927 info->scissor_enable ||
928 info->alpha_blend) {
929 if (!util_blitter_is_blit_supported(ctx->blitter, info)) {
930 debug_printf("blit unsupported %s -> %s\n",
931 util_format_short_name(info->src.resource->format),
932 util_format_short_name(info->dst.resource->format));
933 return;
934 }
935
936 util_blitter_save_fragment_constant_buffer_slot(ctx->blitter, ctx->ubos[PIPE_SHADER_FRAGMENT]);
937 util_blitter_save_vertex_buffer_slot(ctx->blitter, ctx->buffers);
938 util_blitter_save_vertex_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_VERTEX]);
939 util_blitter_save_fragment_shader(ctx->blitter, ctx->gfx_stages[PIPE_SHADER_FRAGMENT]);
940 util_blitter_save_rasterizer(ctx->blitter, ctx->gfx_pipeline_state.rast_state);
941
942 util_blitter_blit(ctx->blitter, info);
943 }
944
945 struct zink_resource *src = zink_resource(info->src.resource);
946 struct zink_resource *dst = zink_resource(info->dst.resource);
947
948 if (src->base.nr_samples > 1 && dst->base.nr_samples <= 1)
949 is_resolve = true;
950
951 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
952 if (!cmdbuf)
953 return;
954
955 if (is_resolve) {
956 VkImageResolve region = {};
957
958 region.srcSubresource.aspectMask = src->aspect;
959 region.srcSubresource.mipLevel = info->src.level;
960 region.srcSubresource.baseArrayLayer = 0; // no clue
961 region.srcSubresource.layerCount = 1; // no clue
962 region.srcOffset.x = info->src.box.x;
963 region.srcOffset.y = info->src.box.y;
964 region.srcOffset.z = info->src.box.z;
965
966 region.dstSubresource.aspectMask = dst->aspect;
967 region.dstSubresource.mipLevel = info->dst.level;
968 region.dstSubresource.baseArrayLayer = 0; // no clue
969 region.dstSubresource.layerCount = 1; // no clue
970 region.dstOffset.x = info->dst.box.x;
971 region.dstOffset.y = info->dst.box.y;
972 region.dstOffset.z = info->dst.box.z;
973
974 region.extent.width = info->dst.box.width;
975 region.extent.height = info->dst.box.height;
976 region.extent.depth = info->dst.box.depth;
977 vkCmdResolveImage(cmdbuf->cmdbuf, src->image, src->layout,
978 dst->image, dst->layout,
979 1, &region);
980
981 } else {
982 if (dst->layout != VK_IMAGE_LAYOUT_GENERAL &&
983 dst->layout != VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
984 zink_resource_barrier(cmdbuf->cmdbuf, dst, dst->aspect,
985 VK_IMAGE_LAYOUT_GENERAL);
986
987 VkImageBlit region = {};
988 region.srcSubresource.aspectMask = src->aspect;
989 region.srcSubresource.mipLevel = info->src.level;
990 region.srcOffsets[0].x = info->src.box.x;
991 region.srcOffsets[0].y = info->src.box.y;
992 region.srcOffsets[1].x = info->src.box.x + info->src.box.width;
993 region.srcOffsets[1].y = info->src.box.y + info->src.box.height;
994
995 if (src->base.array_size > 1) {
996 region.srcOffsets[0].z = 0;
997 region.srcOffsets[1].z = 1;
998 region.srcSubresource.baseArrayLayer = info->src.box.z;
999 region.srcSubresource.layerCount = info->src.box.depth;
1000 } else {
1001 region.srcOffsets[0].z = info->src.box.z;
1002 region.srcOffsets[1].z = info->src.box.z + info->src.box.depth;
1003 region.srcSubresource.baseArrayLayer = 0;
1004 region.srcSubresource.layerCount = 1;
1005 }
1006
1007 region.dstSubresource.aspectMask = dst->aspect;
1008 region.dstSubresource.mipLevel = info->dst.level;
1009 region.dstOffsets[0].x = info->dst.box.x;
1010 region.dstOffsets[0].y = info->dst.box.y;
1011 region.dstOffsets[1].x = info->dst.box.x + info->dst.box.width;
1012 region.dstOffsets[1].y = info->dst.box.y + info->dst.box.height;
1013
1014 if (dst->base.array_size > 1) {
1015 region.dstOffsets[0].z = 0;
1016 region.dstOffsets[1].z = 1;
1017 region.dstSubresource.baseArrayLayer = info->dst.box.z;
1018 region.dstSubresource.layerCount = info->dst.box.depth;
1019 } else {
1020 region.dstOffsets[0].z = info->dst.box.z;
1021 region.dstOffsets[1].z = info->dst.box.z + info->dst.box.depth;
1022 region.dstSubresource.baseArrayLayer = 0;
1023 region.dstSubresource.layerCount = 1;
1024 }
1025
1026 vkCmdBlitImage(cmdbuf->cmdbuf, src->image, src->layout,
1027 dst->image, dst->layout,
1028 1, &region,
1029 filter(info->filter));
1030 }
1031 zink_end_cmdbuf(ctx, cmdbuf);
1032 }
1033
/* pipe_context::flush_resource implementation — intentionally a no-op.
 *
 * NOTE(review): drivers typically use this hook to make a resource's
 * contents externally visible (e.g. before scanout/sharing).  Presumably
 * nothing is required here while command buffers are submitted eagerly,
 * but this needs revisiting once submission is deferred — TODO confirm.
 */
static void
zink_flush_resource(struct pipe_context *pipe,
                    struct pipe_resource *resource)
{
}
1039
1040 static void
1041 zink_resource_copy_region(struct pipe_context *pctx,
1042 struct pipe_resource *pdst,
1043 unsigned dst_level, unsigned dstx, unsigned dsty, unsigned dstz,
1044 struct pipe_resource *psrc,
1045 unsigned src_level, const struct pipe_box *src_box)
1046 {
1047 struct zink_resource *dst = zink_resource(pdst);
1048 struct zink_resource *src = zink_resource(psrc);
1049 struct zink_context *ctx = zink_context(pctx);
1050 if (dst->base.target != PIPE_BUFFER && src->base.target != PIPE_BUFFER) {
1051 VkImageCopy region = {};
1052
1053 region.srcSubresource.aspectMask = src->aspect;
1054 region.srcSubresource.mipLevel = src_level;
1055 region.srcSubresource.layerCount = 1;
1056 if (src->base.array_size > 1) {
1057 region.srcSubresource.baseArrayLayer = src_box->z;
1058 region.srcSubresource.layerCount = src_box->depth;
1059 region.extent.depth = 1;
1060 } else {
1061 region.srcOffset.z = src_box->z;
1062 region.srcSubresource.layerCount = 1;
1063 region.extent.depth = src_box->depth;
1064 }
1065
1066 region.srcOffset.x = src_box->x;
1067 region.srcOffset.y = src_box->y;
1068
1069 region.dstSubresource.aspectMask = dst->aspect;
1070 region.dstSubresource.mipLevel = dst_level;
1071 if (dst->base.array_size > 1) {
1072 region.dstSubresource.baseArrayLayer = dstz;
1073 region.dstSubresource.layerCount = src_box->depth;
1074 } else {
1075 region.dstOffset.z = dstz;
1076 region.dstSubresource.layerCount = 1;
1077 }
1078
1079 region.dstOffset.x = dstx;
1080 region.dstOffset.y = dsty;
1081 region.extent.width = src_box->width;
1082 region.extent.height = src_box->height;
1083
1084 struct zink_cmdbuf *cmdbuf = zink_start_cmdbuf(ctx);
1085 if (!cmdbuf)
1086 return;
1087
1088 vkCmdCopyImage(cmdbuf->cmdbuf, src->image, src->layout,
1089 dst->image, dst->layout,
1090 1, &region);
1091 zink_end_cmdbuf(ctx, cmdbuf);
1092 } else
1093 debug_printf("zink: TODO resource copy\n");
1094 }
1095
1096 struct pipe_context *
1097 zink_context_create(struct pipe_screen *pscreen, void *priv, unsigned flags)
1098 {
1099 struct zink_screen *screen = zink_screen(pscreen);
1100 struct zink_context *ctx = CALLOC_STRUCT(zink_context);
1101
1102 ctx->base.screen = pscreen;
1103 ctx->base.priv = priv;
1104
1105 ctx->base.destroy = zink_context_destroy;
1106
1107 zink_context_state_init(&ctx->base);
1108
1109 ctx->base.create_sampler_state = zink_create_sampler_state;
1110 ctx->base.bind_sampler_states = zink_bind_sampler_states;
1111 ctx->base.delete_sampler_state = zink_delete_sampler_state;
1112
1113 ctx->base.create_sampler_view = zink_create_sampler_view;
1114 ctx->base.set_sampler_views = zink_set_sampler_views;
1115 ctx->base.sampler_view_destroy = zink_destroy_sampler_view;
1116
1117 ctx->base.create_vs_state = zink_create_vs_state;
1118 ctx->base.bind_vs_state = zink_bind_vs_state;
1119 ctx->base.delete_vs_state = zink_delete_vs_state;
1120
1121 ctx->base.create_fs_state = zink_create_fs_state;
1122 ctx->base.bind_fs_state = zink_bind_fs_state;
1123 ctx->base.delete_fs_state = zink_delete_fs_state;
1124
1125 ctx->base.set_polygon_stipple = zink_set_polygon_stipple;
1126 ctx->base.set_vertex_buffers = zink_set_vertex_buffers;
1127 ctx->base.set_viewport_states = zink_set_viewport_states;
1128 ctx->base.set_scissor_states = zink_set_scissor_states;
1129 ctx->base.set_constant_buffer = zink_set_constant_buffer;
1130 ctx->base.set_framebuffer_state = zink_set_framebuffer_state;
1131 ctx->base.set_stencil_ref = zink_set_stencil_ref;
1132 ctx->base.set_clip_state = zink_set_clip_state;
1133 ctx->base.set_active_query_state = zink_set_active_query_state;
1134 ctx->base.set_blend_color = zink_set_blend_color;
1135
1136 ctx->base.clear = zink_clear;
1137 ctx->base.draw_vbo = zink_draw_vbo;
1138 ctx->base.flush = zink_flush;
1139
1140 ctx->base.resource_copy_region = zink_resource_copy_region;
1141 ctx->base.blit = zink_blit;
1142
1143 ctx->base.flush_resource = zink_flush_resource;
1144 zink_context_surface_init(&ctx->base);
1145 zink_context_resource_init(&ctx->base);
1146
1147 slab_create_child(&ctx->transfer_pool, &screen->transfer_pool);
1148
1149 ctx->base.stream_uploader = u_upload_create_default(&ctx->base);
1150 ctx->base.const_uploader = ctx->base.stream_uploader;
1151
1152 int prim_hwsupport = 1 << PIPE_PRIM_POINTS |
1153 1 << PIPE_PRIM_LINES |
1154 1 << PIPE_PRIM_LINE_STRIP |
1155 1 << PIPE_PRIM_TRIANGLES |
1156 1 << PIPE_PRIM_TRIANGLE_STRIP |
1157 1 << PIPE_PRIM_TRIANGLE_FAN;
1158
1159 ctx->primconvert = util_primconvert_create(&ctx->base, prim_hwsupport);
1160 if (!ctx->primconvert)
1161 goto fail;
1162
1163 ctx->blitter = util_blitter_create(&ctx->base);
1164 if (!ctx->blitter)
1165 goto fail;
1166
1167 VkCommandPoolCreateInfo cpci = {};
1168 cpci.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
1169 cpci.queueFamilyIndex = screen->gfx_queue;
1170 cpci.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
1171 if (vkCreateCommandPool(screen->dev, &cpci, NULL, &ctx->cmdpool) != VK_SUCCESS)
1172 goto fail;
1173
1174 VkCommandBufferAllocateInfo cbai = {};
1175 cbai.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
1176 cbai.commandPool = ctx->cmdpool;
1177 cbai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1178 cbai.commandBufferCount = 1;
1179 if (vkAllocateCommandBuffers(screen->dev, &cbai, &ctx->cmdbuf.cmdbuf) != VK_SUCCESS)
1180 goto fail;
1181
1182 VkDescriptorPoolSize sizes[] = {
1183 {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1000}
1184 };
1185 VkDescriptorPoolCreateInfo dpci = {};
1186 dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
1187 dpci.pPoolSizes = sizes;
1188 dpci.poolSizeCount = ARRAY_SIZE(sizes);
1189 dpci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
1190 dpci.maxSets = 1000;
1191
1192 if(vkCreateDescriptorPool(screen->dev, &dpci, 0, &ctx->descpool) != VK_SUCCESS)
1193 goto fail;
1194
1195 vkGetDeviceQueue(screen->dev, screen->gfx_queue, 0, &ctx->queue);
1196
1197 return &ctx->base;
1198
1199 fail:
1200 if (ctx) {
1201 vkDestroyCommandPool(screen->dev, ctx->cmdpool, NULL);
1202 FREE(ctx);
1203 }
1204 return NULL;
1205 }