anv/meta: Remove unneeded resolve pipeline
[mesa.git] / src / vulkan / anv_meta_resolve.c
/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "anv_meta.h"
#include "anv_private.h"
#include "glsl/nir/nir_builder.h"

/**
 * Vertex attributes used by all pipelines.
 */
struct vertex_attrs {
   struct anv_vue_header vue_header;
   float position[2]; /**< 3DPRIM_RECTLIST */
   float tex_position[2];
};

static void
meta_resolve_save(struct anv_meta_saved_state *saved_state,
                  struct anv_cmd_buffer *cmd_buffer)
{
   anv_meta_save(saved_state, cmd_buffer,
                 (1 << VK_DYNAMIC_STATE_VIEWPORT) |
                 (1 << VK_DYNAMIC_STATE_SCISSOR));

   cmd_buffer->state.dynamic.viewport.count = 0;
   cmd_buffer->state.dynamic.scissor.count = 0;
}

static void
meta_resolve_restore(struct anv_meta_saved_state *saved_state,
                     struct anv_cmd_buffer *cmd_buffer)
{
   anv_meta_restore(saved_state, cmd_buffer);
}

static VkPipeline *
get_pipeline_h(struct anv_device *device, uint32_t samples)
{
   uint32_t i = ffs(samples) - 2; /* log2(samples) - 1 */

   assert(samples >= 2);
   assert(i < ARRAY_SIZE(device->meta_state.resolve.pipelines));

   return &device->meta_state.resolve.pipelines[i];
}

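/**
 * Build the meta resolve vertex shader: a passthrough that copies the
 * per-vertex position and texture coordinate straight into the varyings
 * consumed by the fragment shader.
 */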
static nir_shader *
build_nir_vs(void)
{
   const struct glsl_type *vec4 = glsl_vec4_type();

   nir_builder b;
   nir_variable *a_position;
   nir_variable *v_position;
   nir_variable *a_tex_position;
   nir_variable *v_tex_position;

   nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_VERTEX, NULL);
   b.shader->info.name = ralloc_strdup(b.shader, "meta_resolve_vs");

   a_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
                                    "a_position");
   a_position->data.location = VERT_ATTRIB_GENERIC0;

   v_position = nir_variable_create(b.shader, nir_var_shader_out, vec4,
                                    "gl_Position");
   v_position->data.location = VARYING_SLOT_POS;

   a_tex_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
                                        "a_tex_position");
   a_tex_position->data.location = VERT_ATTRIB_GENERIC1;

   v_tex_position = nir_variable_create(b.shader, nir_var_shader_out, vec4,
                                        "v_tex_position");
   v_tex_position->data.location = VARYING_SLOT_VAR0;

   nir_copy_var(&b, v_position, a_position);
   nir_copy_var(&b, v_tex_position, a_tex_position);

   return b.shader;
}

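/**
 * Build the meta resolve fragment shader for the given sample count: fetch
 * every sample of the multisampled source with txf_ms, accumulate the
 * results, and write the average to the single color output.
 */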
static nir_shader *
build_nir_fs(uint32_t num_samples)
{
   const struct glsl_type *vec4 = glsl_vec4_type();

   const struct glsl_type *sampler2DMS =
      glsl_sampler_type(GLSL_SAMPLER_DIM_MS,
                        /*is_shadow*/ false,
                        /*is_array*/ false,
                        GLSL_TYPE_FLOAT);

   nir_builder b;
   nir_variable *u_tex; /* uniform sampler */
   nir_variable *v_position; /* vec4, varying fragment position */
   nir_variable *v_tex_position; /* vec4, varying texture coordinate */
   nir_variable *f_color; /* vec4, fragment output color */
   nir_ssa_def *accum; /* vec4, accumulation of sample values */

   nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
   b.shader->info.name = ralloc_asprintf(b.shader,
                                         "meta_resolve_fs_samples%02d",
                                         num_samples);

   u_tex = nir_variable_create(b.shader, nir_var_uniform, sampler2DMS,
                               "u_tex");
   u_tex->data.descriptor_set = 0;
   u_tex->data.binding = 0;

   v_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
                                    "v_position");
   v_position->data.location = VARYING_SLOT_POS;

   v_tex_position = nir_variable_create(b.shader, nir_var_shader_in, vec4,
                                        "v_tex_position");
   v_tex_position->data.location = VARYING_SLOT_VAR0;

   f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
                                 "f_color");
   f_color->data.location = FRAG_RESULT_DATA0;

   accum = nir_imm_vec4(&b, 0, 0, 0, 0);

   nir_ssa_def *tex_position_ivec =
      nir_f2i(&b, nir_load_var(&b, v_tex_position));

   for (uint32_t i = 0; i < num_samples; ++i) {
      nir_tex_instr *tex;

      tex = nir_tex_instr_create(b.shader, /*num_srcs*/ 2);
      tex->sampler = nir_deref_var_create(tex, u_tex);
      tex->sampler_dim = GLSL_SAMPLER_DIM_MS;
      tex->op = nir_texop_txf_ms;
      tex->src[0].src = nir_src_for_ssa(tex_position_ivec);
      tex->src[0].src_type = nir_tex_src_coord;
      tex->src[1].src = nir_src_for_ssa(nir_imm_int(&b, i));
      tex->src[1].src_type = nir_tex_src_ms_index;
      tex->dest_type = nir_type_float;
      tex->is_array = false;
      tex->coord_components = 3;
      nir_ssa_dest_init(&tex->instr, &tex->dest, /*num_components*/ 4, "tex");
      nir_builder_instr_insert(&b, &tex->instr);

      accum = nir_fadd(&b, accum, &tex->dest.ssa);
   }

   accum = nir_fdiv(&b, accum, nir_imm_float(&b, num_samples));
   nir_store_var(&b, f_color, accum, /*writemask*/ 0xf);

   return b.shader;
}

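/**
 * Create the single-subpass render pass shared by all resolve pipelines.
 * The attachment format is left as VK_FORMAT_UNDEFINED because the meta
 * shaders never look at it.
 */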
static VkResult
create_pass(struct anv_device *device)
{
   VkResult result;
   VkDevice device_h = anv_device_to_handle(device);
   const VkAllocationCallbacks *alloc = &device->meta_state.alloc;

   result = anv_CreateRenderPass(device_h,
      &(VkRenderPassCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
         .attachmentCount = 1,
         .pAttachments = &(VkAttachmentDescription) {
            .format = VK_FORMAT_UNDEFINED, /* Our shaders don't care */
            .samples = 1,
            .loadOp = VK_ATTACHMENT_LOAD_OP_LOAD,
            .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
            .initialLayout = VK_IMAGE_LAYOUT_GENERAL,
            .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
         },
         .subpassCount = 1,
         .pSubpasses = &(VkSubpassDescription) {
            .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
            .inputAttachmentCount = 0,
            .colorAttachmentCount = 1,
            .pColorAttachments = &(VkAttachmentReference) {
               .attachment = 0,
               .layout = VK_IMAGE_LAYOUT_GENERAL,
            },
            .pResolveAttachments = NULL,
            .pDepthStencilAttachment = &(VkAttachmentReference) {
               .attachment = VK_ATTACHMENT_UNUSED,
            },
            .preserveAttachmentCount = 0,
            .pPreserveAttachments = NULL,
         },
         .dependencyCount = 0,
      },
      alloc,
      &device->meta_state.resolve.pass);

   return result;
}

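/**
 * Create the resolve pipeline for one sample count: the shared passthrough
 * vertex shader plus a fragment shader specialized for num_samples. The
 * anv-specific create info requests RECTLIST drawing and disables the
 * viewport, scissor, and hardware vertex shader stages.
 */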
static VkResult
create_pipeline(struct anv_device *device,
                uint32_t num_samples,
                VkShaderModule vs_module_h)
{
   VkResult result;
   VkDevice device_h = anv_device_to_handle(device);

   struct anv_shader_module fs_module = {
      .nir = build_nir_fs(num_samples),
   };

   if (!fs_module.nir) {
      /* XXX: Need more accurate error */
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto cleanup;
   }

   result = anv_graphics_pipeline_create(device_h,
      VK_NULL_HANDLE,
      &(VkGraphicsPipelineCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
         .stageCount = 2,
         .pStages = (VkPipelineShaderStageCreateInfo[]) {
            {
               .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
               .stage = VK_SHADER_STAGE_VERTEX_BIT,
               .module = vs_module_h,
               .pName = "main",
            },
            {
               .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
               .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
               .module = anv_shader_module_to_handle(&fs_module),
               .pName = "main",
            },
         },
         .pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
            .vertexBindingDescriptionCount = 1,
            .pVertexBindingDescriptions = (VkVertexInputBindingDescription[]) {
               {
                  .binding = 0,
                  .stride = sizeof(struct vertex_attrs),
                  .inputRate = VK_VERTEX_INPUT_RATE_VERTEX
               },
            },
            .vertexAttributeDescriptionCount = 3,
            .pVertexAttributeDescriptions = (VkVertexInputAttributeDescription[]) {
               {
                  /* VUE Header */
                  .location = 0,
                  .binding = 0,
                  .format = VK_FORMAT_R32G32B32A32_UINT,
                  .offset = offsetof(struct vertex_attrs, vue_header),
               },
               {
                  /* Position */
                  .location = 1,
                  .binding = 0,
                  .format = VK_FORMAT_R32G32_SFLOAT,
                  .offset = offsetof(struct vertex_attrs, position),
               },
               {
                  /* Texture Coordinate */
                  .location = 2,
                  .binding = 0,
                  .format = VK_FORMAT_R32G32_SFLOAT,
                  .offset = offsetof(struct vertex_attrs, tex_position),
               },
            },
         },
         .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
            .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
            .primitiveRestartEnable = false,
         },
         .pViewportState = &(VkPipelineViewportStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
            .viewportCount = 1,
            .scissorCount = 1,
         },
         .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
            .depthClampEnable = false,
            .rasterizerDiscardEnable = false,
            .polygonMode = VK_POLYGON_MODE_FILL,
            .cullMode = VK_CULL_MODE_NONE,
            .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
         },
         .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
            .rasterizationSamples = 1,
            .sampleShadingEnable = false,
            .pSampleMask = (VkSampleMask[]) { 0x1 },
            .alphaToCoverageEnable = false,
            .alphaToOneEnable = false,
         },
         .pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
            .logicOpEnable = false,
            .attachmentCount = 1,
            .pAttachments = (VkPipelineColorBlendAttachmentState []) {
               {
                  .colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
                                    VK_COLOR_COMPONENT_G_BIT |
                                    VK_COLOR_COMPONENT_B_BIT |
                                    VK_COLOR_COMPONENT_A_BIT,
               },
            },
         },
         .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
            .dynamicStateCount = 2,
            .pDynamicStates = (VkDynamicState[]) {
               VK_DYNAMIC_STATE_VIEWPORT,
               VK_DYNAMIC_STATE_SCISSOR,
            },
         },
         .layout = device->meta_state.resolve.pipeline_layout,
         .renderPass = device->meta_state.resolve.pass,
         .subpass = 0,
      },
      &(struct anv_graphics_pipeline_create_info) {
         .color_attachment_count = -1,
         .use_repclear = false,
         .disable_viewport = true,
         .disable_scissor = true,
         .disable_vs = true,
         .use_rectlist = true
      },
      &device->meta_state.alloc,
      get_pipeline_h(device, num_samples));
   if (result != VK_SUCCESS)
      goto cleanup;

   goto cleanup;

cleanup:
   ralloc_free(fs_module.nir);
   return result;
}

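/**
 * Destroy the render pass, pipeline layout, descriptor set layout, and any
 * pipelines owned by the resolve meta state. Safe to call on a partially
 * initialized state; NULL handles are skipped.
 */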
void
anv_device_finish_meta_resolve_state(struct anv_device *device)
{
   struct anv_meta_state *state = &device->meta_state;
   VkDevice device_h = anv_device_to_handle(device);
   VkRenderPass pass_h = device->meta_state.resolve.pass;
   VkPipelineLayout pipeline_layout_h = device->meta_state.resolve.pipeline_layout;
   VkDescriptorSetLayout ds_layout_h = device->meta_state.resolve.ds_layout;
   const VkAllocationCallbacks *alloc = &device->meta_state.alloc;

   if (pass_h)
      ANV_CALL(DestroyRenderPass)(device_h, pass_h,
                                  &device->meta_state.alloc);

   if (pipeline_layout_h)
      ANV_CALL(DestroyPipelineLayout)(device_h, pipeline_layout_h, alloc);

   if (ds_layout_h)
      ANV_CALL(DestroyDescriptorSetLayout)(device_h, ds_layout_h, alloc);

   for (uint32_t i = 0; i < ARRAY_SIZE(state->resolve.pipelines); ++i) {
      VkPipeline pipeline_h = state->resolve.pipelines[i];

      if (pipeline_h) {
         ANV_CALL(DestroyPipeline)(device_h, pipeline_h, alloc);
      }
   }
}

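/**
 * Create the descriptor set layout, pipeline layout, render pass, and one
 * resolve pipeline per sample count supported by the hardware. On failure,
 * everything created so far is torn down again.
 */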
VkResult
anv_device_init_meta_resolve_state(struct anv_device *device)
{
   VkResult res = VK_SUCCESS;
   VkDevice device_h = anv_device_to_handle(device);
   const VkAllocationCallbacks *alloc = &device->meta_state.alloc;

   const isl_sample_count_mask_t sample_count_mask =
      isl_device_get_sample_counts(&device->isl_dev);

   zero(device->meta_state.resolve);

   struct anv_shader_module vs_module = { .nir = build_nir_vs() };
   if (!vs_module.nir) {
      /* XXX: Need more accurate error */
      res = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   VkShaderModule vs_module_h = anv_shader_module_to_handle(&vs_module);

   res = anv_CreateDescriptorSetLayout(device_h,
      &(VkDescriptorSetLayoutCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
         .bindingCount = 1,
         .pBindings = (VkDescriptorSetLayoutBinding[]) {
            {
               .binding = 0,
               .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
               .descriptorCount = 1,
               .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
            },
         },
      },
      alloc,
      &device->meta_state.resolve.ds_layout);
   if (res != VK_SUCCESS)
      goto fail;

   res = anv_CreatePipelineLayout(device_h,
      &(VkPipelineLayoutCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
         .setLayoutCount = 1,
         .pSetLayouts = (VkDescriptorSetLayout[]) {
            device->meta_state.resolve.ds_layout,
         },
      },
      alloc,
      &device->meta_state.resolve.pipeline_layout);
   if (res != VK_SUCCESS)
      goto fail;

   res = create_pass(device);
   if (res != VK_SUCCESS)
      goto fail;

   for (uint32_t i = 0;
        i < ARRAY_SIZE(device->meta_state.resolve.pipelines); ++i) {

      uint32_t sample_count = 1 << (1 + i);
      if (!(sample_count_mask & sample_count))
         continue;

      res = create_pipeline(device, sample_count, vs_module_h);
      if (res != VK_SUCCESS)
         goto fail;
   }

   goto cleanup;

fail:
   anv_device_finish_meta_resolve_state(device);

cleanup:
   ralloc_free(vs_module.nir);

   return res;
}

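/**
 * Emit a single resolve draw: upload three RECTLIST vertices covering the
 * destination region, bind the multisampled source view through a temporary
 * sampler and descriptor set, and draw with the pipeline that matches the
 * source sample count.
 */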
static void
emit_resolve(struct anv_cmd_buffer *cmd_buffer,
             struct anv_image_view *src_iview,
             const VkOffset2D *src_offset,
             struct anv_image_view *dest_iview,
             const VkOffset2D *dest_offset,
             const VkExtent2D *resolve_extent)
{
   struct anv_device *device = cmd_buffer->device;
   VkDevice device_h = anv_device_to_handle(device);
   VkCommandBuffer cmd_buffer_h = anv_cmd_buffer_to_handle(cmd_buffer);
   const struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;
   const struct anv_image *src_image = src_iview->image;
   VkDescriptorPool dummy_desc_pool_h = (VkDescriptorPool) 1;

   const struct vertex_attrs vertex_data[3] = {
      {
         .vue_header = {0},
         .position = {
            dest_offset->x + resolve_extent->width,
            dest_offset->y + resolve_extent->height,
         },
         .tex_position = {
            src_offset->x + resolve_extent->width,
            src_offset->y + resolve_extent->height,
         },
      },
      {
         .vue_header = {0},
         .position = {
            dest_offset->x,
            dest_offset->y + resolve_extent->height,
         },
         .tex_position = {
            src_offset->x,
            src_offset->y + resolve_extent->height,
         },
      },
      {
         .vue_header = {0},
         .position = {
            dest_offset->x,
            dest_offset->y,
         },
         .tex_position = {
            src_offset->x,
            src_offset->y,
         },
      },
   };

   struct anv_state vertex_mem =
      anv_cmd_buffer_emit_dynamic(cmd_buffer, vertex_data,
                                  sizeof(vertex_data), 16);

   struct anv_buffer vertex_buffer = {
      .device = device,
      .size = sizeof(vertex_data),
      .bo = &cmd_buffer->dynamic_state_stream.block_pool->bo,
      .offset = vertex_mem.offset,
   };

   VkBuffer vertex_buffer_h = anv_buffer_to_handle(&vertex_buffer);

   anv_CmdBindVertexBuffers(cmd_buffer_h,
      /*firstBinding*/ 0,
      /*bindingCount*/ 1,
      (VkBuffer[]) { vertex_buffer_h },
      (VkDeviceSize[]) { 0 });

   VkSampler sampler_h;
   ANV_CALL(CreateSampler)(device_h,
      &(VkSamplerCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
         .magFilter = VK_FILTER_NEAREST,
         .minFilter = VK_FILTER_NEAREST,
         .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
         .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
         .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
         .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
         .mipLodBias = 0.0,
         .anisotropyEnable = false,
         .compareEnable = false,
         .minLod = 0.0,
         .maxLod = 0.0,
         .unnormalizedCoordinates = false,
      },
      &cmd_buffer->pool->alloc,
      &sampler_h);

   VkDescriptorSet desc_set_h;
   anv_AllocateDescriptorSets(device_h,
      &(VkDescriptorSetAllocateInfo) {
         .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
         .descriptorPool = dummy_desc_pool_h,
         .descriptorSetCount = 1,
         .pSetLayouts = (VkDescriptorSetLayout[]) {
            device->meta_state.resolve.ds_layout,
         },
      },
      &desc_set_h);

   ANV_FROM_HANDLE(anv_descriptor_set, desc_set, desc_set_h);

   anv_UpdateDescriptorSets(device_h,
      /*writeCount*/ 1,
      (VkWriteDescriptorSet[]) {
         {
            .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            .dstSet = desc_set_h,
            .dstBinding = 0,
            .dstArrayElement = 0,
            .descriptorCount = 1,
            .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
            .pImageInfo = (VkDescriptorImageInfo[]) {
               {
                  .sampler = sampler_h,
                  .imageView = anv_image_view_to_handle(src_iview),
                  .imageLayout = VK_IMAGE_LAYOUT_GENERAL,
               },
            },
         },
      },
      /*copyCount*/ 0,
      /*copies*/ NULL);

   ANV_CALL(CmdSetViewport)(cmd_buffer_h,
      /*firstViewport*/ 0,
      /*viewportCount*/ 1,
      (VkViewport[]) {
         {
            .x = 0,
            .y = 0,
            .width = fb->width,
            .height = fb->height,
            .minDepth = 0.0,
            .maxDepth = 1.0,
         },
      });

   ANV_CALL(CmdSetScissor)(cmd_buffer_h,
      /*firstScissor*/ 0,
      /*scissorCount*/ 1,
      (VkRect2D[]) {
         {
            .offset = { 0, 0 },
            .extent = (VkExtent2D) { fb->width, fb->height },
         },
      });

   VkPipeline pipeline_h = *get_pipeline_h(device, src_image->samples);
   ANV_FROM_HANDLE(anv_pipeline, pipeline, pipeline_h);

   if (cmd_buffer->state.pipeline != pipeline) {
      anv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
                          pipeline_h);
   }

   anv_CmdBindDescriptorSets(cmd_buffer_h,
      VK_PIPELINE_BIND_POINT_GRAPHICS,
      device->meta_state.resolve.pipeline_layout,
      /*firstSet*/ 0,
      /*setCount*/ 1,
      (VkDescriptorSet[]) {
         desc_set_h,
      },
      /*copyCount*/ 0,
      /*copies*/ NULL);

   ANV_CALL(CmdDraw)(cmd_buffer_h, 3, 1, 0, 0);

   /* All objects below are consumed by the draw call. We may safely destroy
    * them.
    */
   anv_descriptor_set_destroy(device, desc_set);
   anv_DestroySampler(device_h, sampler_h,
                      &cmd_buffer->pool->alloc);
}

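/**
 * vkCmdResolveImage entry point: for every region and every array layer,
 * create temporary image views and a framebuffer for the destination, begin
 * the meta resolve render pass, and emit one resolve draw.
 */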
void anv_CmdResolveImage(
   VkCommandBuffer cmd_buffer_h,
   VkImage src_image_h,
   VkImageLayout src_image_layout,
   VkImage dest_image_h,
   VkImageLayout dest_image_layout,
   uint32_t region_count,
   const VkImageResolve* regions)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmd_buffer_h);
   ANV_FROM_HANDLE(anv_image, src_image, src_image_h);
   ANV_FROM_HANDLE(anv_image, dest_image, dest_image_h);
   struct anv_device *device = cmd_buffer->device;
   struct anv_meta_saved_state state;
   VkDevice device_h = anv_device_to_handle(device);

   meta_resolve_save(&state, cmd_buffer);

   assert(src_image->samples > 1);
   assert(dest_image->samples == 1);

   if (src_image->samples >= 16) {
      /* See commit aa3f9aaf31e9056a255f9e0472ebdfdaa60abe54 for the
       * glBlitFramebuffer workaround for samples >= 16.
       */
      anv_finishme("vkCmdResolveImage: need interpolation workaround when "
                   "samples >= 16");
   }

   if (src_image->array_size > 1)
      anv_finishme("vkCmdResolveImage: multisample array images");

   for (uint32_t r = 0; r < region_count; ++r) {
      const VkImageResolve *region = &regions[r];

      /* From the Vulkan 1.0 spec:
       *
       *    - The aspectMask member of srcSubresource and dstSubresource must
       *      only contain VK_IMAGE_ASPECT_COLOR_BIT
       *
       *    - The layerCount member of srcSubresource and dstSubresource must
       *      match
       */
      assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
      assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
      assert(region->srcSubresource.layerCount ==
             region->dstSubresource.layerCount);

      const uint32_t src_base_layer =
         anv_meta_get_iview_layer(src_image, &region->srcSubresource,
                                  &region->srcOffset);

      const uint32_t dest_base_layer =
         anv_meta_get_iview_layer(dest_image, &region->dstSubresource,
                                  &region->dstOffset);

      for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
           ++layer) {

         struct anv_image_view src_iview;
         anv_image_view_init(&src_iview, cmd_buffer->device,
            &(VkImageViewCreateInfo) {
               .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
               .image = src_image_h,
               .viewType = anv_meta_get_view_type(src_image),
               .format = src_image->format->vk_format,
               .subresourceRange = {
                  .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                  .baseMipLevel = region->srcSubresource.mipLevel,
                  .levelCount = 1,
                  .baseArrayLayer = src_base_layer + layer,
                  .layerCount = 1,
               },
            },
            cmd_buffer, 0);

         struct anv_image_view dest_iview;
         anv_image_view_init(&dest_iview, cmd_buffer->device,
            &(VkImageViewCreateInfo) {
               .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
               .image = dest_image_h,
               .viewType = anv_meta_get_view_type(dest_image),
               .format = dest_image->format->vk_format,
               .subresourceRange = {
                  .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                  .baseMipLevel = region->dstSubresource.mipLevel,
                  .levelCount = 1,
                  .baseArrayLayer = dest_base_layer + layer,
                  .layerCount = 1,
               },
            },
            cmd_buffer, 0);

         VkFramebuffer fb_h;
         anv_CreateFramebuffer(device_h,
            &(VkFramebufferCreateInfo) {
               .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
               .attachmentCount = 1,
               .pAttachments = (VkImageView[]) {
                  anv_image_view_to_handle(&dest_iview),
               },
               .width = anv_minify(dest_image->extent.width,
                                   region->dstSubresource.mipLevel),
               .height = anv_minify(dest_image->extent.height,
                                    region->dstSubresource.mipLevel),
               .layers = 1
            },
            &cmd_buffer->pool->alloc,
            &fb_h);

         ANV_CALL(CmdBeginRenderPass)(cmd_buffer_h,
            &(VkRenderPassBeginInfo) {
               .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
               .renderPass = device->meta_state.resolve.pass,
               .framebuffer = fb_h,
               .renderArea = {
                  .offset = {
                     region->dstOffset.x,
                     region->dstOffset.y,
                  },
                  .extent = {
                     region->extent.width,
                     region->extent.height,
                  }
               },
               .clearValueCount = 0,
               .pClearValues = NULL,
            },
            VK_SUBPASS_CONTENTS_INLINE);

         emit_resolve(cmd_buffer,
            &src_iview,
            &(VkOffset2D) {
               .x = region->srcOffset.x,
               .y = region->srcOffset.y,
            },
            &dest_iview,
            &(VkOffset2D) {
               .x = region->dstOffset.x,
               .y = region->dstOffset.y,
            },
            &(VkExtent2D) {
               .width = region->extent.width,
               .height = region->extent.height,
            });

         ANV_CALL(CmdEndRenderPass)(cmd_buffer_h);

         anv_DestroyFramebuffer(device_h, fb_h,
                                &cmd_buffer->pool->alloc);
      }
   }

   meta_resolve_restore(&state, cmd_buffer);
}