2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
25 #include "nir/nir_builder.h"
28 VkOffset3D src_offset
;
29 VkExtent3D src_extent
;
30 VkOffset3D dest_offset
;
31 VkExtent3D dest_extent
;
35 build_nir_vertex_shader(void)
37 const struct glsl_type
*vec4
= glsl_vec4_type();
40 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_VERTEX
, NULL
);
41 b
.shader
->info
.name
= ralloc_strdup(b
.shader
, "meta_blit_vs");
43 nir_variable
*pos_in
= nir_variable_create(b
.shader
, nir_var_shader_in
,
45 pos_in
->data
.location
= VERT_ATTRIB_GENERIC0
;
46 nir_variable
*pos_out
= nir_variable_create(b
.shader
, nir_var_shader_out
,
48 pos_out
->data
.location
= VARYING_SLOT_POS
;
49 nir_copy_var(&b
, pos_out
, pos_in
);
51 nir_variable
*tex_pos_in
= nir_variable_create(b
.shader
, nir_var_shader_in
,
53 tex_pos_in
->data
.location
= VERT_ATTRIB_GENERIC1
;
54 nir_variable
*tex_pos_out
= nir_variable_create(b
.shader
, nir_var_shader_out
,
56 tex_pos_out
->data
.location
= VARYING_SLOT_VAR0
;
57 tex_pos_out
->data
.interpolation
= INTERP_QUALIFIER_SMOOTH
;
58 nir_copy_var(&b
, tex_pos_out
, tex_pos_in
);
64 build_nir_copy_fragment_shader(enum glsl_sampler_dim tex_dim
)
66 const struct glsl_type
*vec4
= glsl_vec4_type();
69 nir_builder_init_simple_shader(&b
, NULL
, MESA_SHADER_FRAGMENT
, NULL
);
70 b
.shader
->info
.name
= ralloc_strdup(b
.shader
, "meta_blit_fs");
72 nir_variable
*tex_pos_in
= nir_variable_create(b
.shader
, nir_var_shader_in
,
74 tex_pos_in
->data
.location
= VARYING_SLOT_VAR0
;
76 /* Swizzle the array index which comes in as Z coordinate into the right
79 unsigned swz
[] = { 0, (tex_dim
== GLSL_SAMPLER_DIM_1D
? 2 : 1), 2 };
80 nir_ssa_def
*const tex_pos
=
81 nir_swizzle(&b
, nir_load_var(&b
, tex_pos_in
), swz
,
82 (tex_dim
== GLSL_SAMPLER_DIM_1D
? 2 : 3), false);
84 const struct glsl_type
*sampler_type
=
85 glsl_sampler_type(tex_dim
, false, tex_dim
!= GLSL_SAMPLER_DIM_3D
,
86 glsl_get_base_type(vec4
));
87 nir_variable
*sampler
= nir_variable_create(b
.shader
, nir_var_uniform
,
88 sampler_type
, "s_tex");
89 sampler
->data
.descriptor_set
= 0;
90 sampler
->data
.binding
= 0;
92 nir_tex_instr
*tex
= nir_tex_instr_create(b
.shader
, 1);
93 tex
->sampler_dim
= tex_dim
;
94 tex
->op
= nir_texop_tex
;
95 tex
->src
[0].src_type
= nir_tex_src_coord
;
96 tex
->src
[0].src
= nir_src_for_ssa(tex_pos
);
97 tex
->dest_type
= nir_type_float
; /* TODO */
98 tex
->is_array
= glsl_sampler_type_is_array(sampler_type
);
99 tex
->coord_components
= tex_pos
->num_components
;
100 tex
->texture
= nir_deref_var_create(tex
, sampler
);
101 tex
->sampler
= nir_deref_var_create(tex
, sampler
);
103 nir_ssa_dest_init(&tex
->instr
, &tex
->dest
, 4, 32, "tex");
104 nir_builder_instr_insert(&b
, &tex
->instr
);
106 nir_variable
*color_out
= nir_variable_create(b
.shader
, nir_var_shader_out
,
108 color_out
->data
.location
= FRAG_RESULT_DATA0
;
109 nir_store_var(&b
, color_out
, &tex
->dest
.ssa
, 4);
115 meta_prepare_blit(struct anv_cmd_buffer
*cmd_buffer
,
116 struct anv_meta_saved_state
*saved_state
)
118 anv_meta_save(saved_state
, cmd_buffer
, 0);
122 meta_emit_blit(struct anv_cmd_buffer
*cmd_buffer
,
123 struct anv_image
*src_image
,
124 struct anv_image_view
*src_iview
,
125 VkOffset3D src_offset
,
126 VkExtent3D src_extent
,
127 struct anv_image
*dest_image
,
128 struct anv_image_view
*dest_iview
,
129 VkOffset3D dest_offset
,
130 VkExtent3D dest_extent
,
131 VkFilter blit_filter
)
133 struct anv_device
*device
= cmd_buffer
->device
;
135 struct blit_vb_data
{
140 assert(src_image
->samples
== dest_image
->samples
);
142 unsigned vb_size
= sizeof(struct anv_vue_header
) + 3 * sizeof(*vb_data
);
144 struct anv_state vb_state
=
145 anv_cmd_buffer_alloc_dynamic_state(cmd_buffer
, vb_size
, 16);
146 memset(vb_state
.map
, 0, sizeof(struct anv_vue_header
));
147 vb_data
= vb_state
.map
+ sizeof(struct anv_vue_header
);
149 vb_data
[0] = (struct blit_vb_data
) {
151 dest_offset
.x
+ dest_extent
.width
,
152 dest_offset
.y
+ dest_extent
.height
,
155 (float)(src_offset
.x
+ src_extent
.width
)
156 / (float)src_iview
->extent
.width
,
157 (float)(src_offset
.y
+ src_extent
.height
)
158 / (float)src_iview
->extent
.height
,
159 (float)src_offset
.z
/ (float)src_iview
->extent
.depth
,
163 vb_data
[1] = (struct blit_vb_data
) {
166 dest_offset
.y
+ dest_extent
.height
,
169 (float)src_offset
.x
/ (float)src_iview
->extent
.width
,
170 (float)(src_offset
.y
+ src_extent
.height
) /
171 (float)src_iview
->extent
.height
,
172 (float)src_offset
.z
/ (float)src_iview
->extent
.depth
,
176 vb_data
[2] = (struct blit_vb_data
) {
182 (float)src_offset
.x
/ (float)src_iview
->extent
.width
,
183 (float)src_offset
.y
/ (float)src_iview
->extent
.height
,
184 (float)src_offset
.z
/ (float)src_iview
->extent
.depth
,
188 if (!device
->info
.has_llc
)
189 anv_state_clflush(vb_state
);
191 struct anv_buffer vertex_buffer
= {
194 .bo
= &device
->dynamic_state_block_pool
.bo
,
195 .offset
= vb_state
.offset
,
198 anv_CmdBindVertexBuffers(anv_cmd_buffer_to_handle(cmd_buffer
), 0, 2,
200 anv_buffer_to_handle(&vertex_buffer
),
201 anv_buffer_to_handle(&vertex_buffer
)
205 sizeof(struct anv_vue_header
),
209 ANV_CALL(CreateSampler
)(anv_device_to_handle(device
),
210 &(VkSamplerCreateInfo
) {
211 .sType
= VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO
,
212 .magFilter
= blit_filter
,
213 .minFilter
= blit_filter
,
214 }, &cmd_buffer
->pool
->alloc
, &sampler
);
216 VkDescriptorPool desc_pool
;
217 anv_CreateDescriptorPool(anv_device_to_handle(device
),
218 &(const VkDescriptorPoolCreateInfo
) {
219 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO
,
224 .pPoolSizes
= (VkDescriptorPoolSize
[]) {
226 .type
= VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
,
230 }, &cmd_buffer
->pool
->alloc
, &desc_pool
);
233 anv_AllocateDescriptorSets(anv_device_to_handle(device
),
234 &(VkDescriptorSetAllocateInfo
) {
235 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO
,
236 .descriptorPool
= desc_pool
,
237 .descriptorSetCount
= 1,
238 .pSetLayouts
= &device
->meta_state
.blit
.ds_layout
241 anv_UpdateDescriptorSets(anv_device_to_handle(device
),
243 (VkWriteDescriptorSet
[]) {
245 .sType
= VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
,
248 .dstArrayElement
= 0,
249 .descriptorCount
= 1,
250 .descriptorType
= VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
,
251 .pImageInfo
= (VkDescriptorImageInfo
[]) {
254 .imageView
= anv_image_view_to_handle(src_iview
),
255 .imageLayout
= VK_IMAGE_LAYOUT_GENERAL
,
262 anv_CreateFramebuffer(anv_device_to_handle(device
),
263 &(VkFramebufferCreateInfo
) {
264 .sType
= VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
,
265 .attachmentCount
= 1,
266 .pAttachments
= (VkImageView
[]) {
267 anv_image_view_to_handle(dest_iview
),
269 .width
= dest_iview
->extent
.width
,
270 .height
= dest_iview
->extent
.height
,
272 }, &cmd_buffer
->pool
->alloc
, &fb
);
274 ANV_CALL(CmdBeginRenderPass
)(anv_cmd_buffer_to_handle(cmd_buffer
),
275 &(VkRenderPassBeginInfo
) {
276 .sType
= VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
,
277 .renderPass
= device
->meta_state
.blit
.render_pass
,
280 .offset
= { dest_offset
.x
, dest_offset
.y
},
281 .extent
= { dest_extent
.width
, dest_extent
.height
},
283 .clearValueCount
= 0,
284 .pClearValues
= NULL
,
285 }, VK_SUBPASS_CONTENTS_INLINE
);
289 switch (src_image
->type
) {
290 case VK_IMAGE_TYPE_1D
:
291 pipeline
= device
->meta_state
.blit
.pipeline_1d_src
;
293 case VK_IMAGE_TYPE_2D
:
294 pipeline
= device
->meta_state
.blit
.pipeline_2d_src
;
296 case VK_IMAGE_TYPE_3D
:
297 pipeline
= device
->meta_state
.blit
.pipeline_3d_src
;
300 unreachable(!"bad VkImageType");
303 if (cmd_buffer
->state
.pipeline
!= anv_pipeline_from_handle(pipeline
)) {
304 anv_CmdBindPipeline(anv_cmd_buffer_to_handle(cmd_buffer
),
305 VK_PIPELINE_BIND_POINT_GRAPHICS
, pipeline
);
308 anv_CmdBindDescriptorSets(anv_cmd_buffer_to_handle(cmd_buffer
),
309 VK_PIPELINE_BIND_POINT_GRAPHICS
,
310 device
->meta_state
.blit
.pipeline_layout
, 0, 1,
313 ANV_CALL(CmdDraw
)(anv_cmd_buffer_to_handle(cmd_buffer
), 3, 1, 0, 0);
315 ANV_CALL(CmdEndRenderPass
)(anv_cmd_buffer_to_handle(cmd_buffer
));
317 /* At the point where we emit the draw call, all data from the
318 * descriptor sets, etc. has been used. We are free to delete it.
320 anv_DestroyDescriptorPool(anv_device_to_handle(device
),
321 desc_pool
, &cmd_buffer
->pool
->alloc
);
322 anv_DestroySampler(anv_device_to_handle(device
), sampler
,
323 &cmd_buffer
->pool
->alloc
);
324 anv_DestroyFramebuffer(anv_device_to_handle(device
), fb
,
325 &cmd_buffer
->pool
->alloc
);
329 meta_finish_blit(struct anv_cmd_buffer
*cmd_buffer
,
330 const struct anv_meta_saved_state
*saved_state
)
332 anv_meta_restore(saved_state
, cmd_buffer
);
335 void anv_CmdBlitImage(
336 VkCommandBuffer commandBuffer
,
338 VkImageLayout srcImageLayout
,
340 VkImageLayout destImageLayout
,
341 uint32_t regionCount
,
342 const VkImageBlit
* pRegions
,
346 ANV_FROM_HANDLE(anv_cmd_buffer
, cmd_buffer
, commandBuffer
);
347 ANV_FROM_HANDLE(anv_image
, src_image
, srcImage
);
348 ANV_FROM_HANDLE(anv_image
, dest_image
, destImage
);
349 struct anv_meta_saved_state saved_state
;
351 /* From the Vulkan 1.0 spec:
353 * vkCmdBlitImage must not be used for multisampled source or
354 * destination images. Use vkCmdResolveImage for this purpose.
356 assert(src_image
->samples
== 1);
357 assert(dest_image
->samples
== 1);
359 meta_prepare_blit(cmd_buffer
, &saved_state
);
361 for (unsigned r
= 0; r
< regionCount
; r
++) {
362 struct anv_image_view src_iview
;
363 anv_image_view_init(&src_iview
, cmd_buffer
->device
,
364 &(VkImageViewCreateInfo
) {
365 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
367 .viewType
= anv_meta_get_view_type(src_image
),
368 .format
= src_image
->vk_format
,
369 .subresourceRange
= {
370 .aspectMask
= pRegions
[r
].srcSubresource
.aspectMask
,
371 .baseMipLevel
= pRegions
[r
].srcSubresource
.mipLevel
,
373 .baseArrayLayer
= pRegions
[r
].srcSubresource
.baseArrayLayer
,
377 cmd_buffer
, VK_IMAGE_USAGE_SAMPLED_BIT
);
379 const VkOffset3D dest_offset
= {
380 .x
= pRegions
[r
].dstOffsets
[0].x
,
381 .y
= pRegions
[r
].dstOffsets
[0].y
,
385 if (pRegions
[r
].dstOffsets
[1].x
< pRegions
[r
].dstOffsets
[0].x
||
386 pRegions
[r
].dstOffsets
[1].y
< pRegions
[r
].dstOffsets
[0].y
||
387 pRegions
[r
].srcOffsets
[1].x
< pRegions
[r
].srcOffsets
[0].x
||
388 pRegions
[r
].srcOffsets
[1].y
< pRegions
[r
].srcOffsets
[0].y
)
389 anv_finishme("FINISHME: Allow flipping in blits");
391 const VkExtent3D dest_extent
= {
392 .width
= pRegions
[r
].dstOffsets
[1].x
- pRegions
[r
].dstOffsets
[0].x
,
393 .height
= pRegions
[r
].dstOffsets
[1].y
- pRegions
[r
].dstOffsets
[0].y
,
396 const VkExtent3D src_extent
= {
397 .width
= pRegions
[r
].srcOffsets
[1].x
- pRegions
[r
].srcOffsets
[0].x
,
398 .height
= pRegions
[r
].srcOffsets
[1].y
- pRegions
[r
].srcOffsets
[0].y
,
401 const uint32_t dest_array_slice
=
402 anv_meta_get_iview_layer(dest_image
, &pRegions
[r
].dstSubresource
,
403 &pRegions
[r
].dstOffsets
[0]);
405 if (pRegions
[r
].srcSubresource
.layerCount
> 1)
406 anv_finishme("FINISHME: copy multiple array layers");
408 if (pRegions
[r
].srcOffsets
[0].z
+ 1 != pRegions
[r
].srcOffsets
[1].z
||
409 pRegions
[r
].dstOffsets
[0].z
+ 1 != pRegions
[r
].dstOffsets
[1].z
)
410 anv_finishme("FINISHME: copy multiple depth layers");
412 struct anv_image_view dest_iview
;
413 anv_image_view_init(&dest_iview
, cmd_buffer
->device
,
414 &(VkImageViewCreateInfo
) {
415 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
417 .viewType
= anv_meta_get_view_type(dest_image
),
418 .format
= dest_image
->vk_format
,
419 .subresourceRange
= {
420 .aspectMask
= VK_IMAGE_ASPECT_COLOR_BIT
,
421 .baseMipLevel
= pRegions
[r
].dstSubresource
.mipLevel
,
423 .baseArrayLayer
= dest_array_slice
,
427 cmd_buffer
, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
);
429 meta_emit_blit(cmd_buffer
,
430 src_image
, &src_iview
,
431 pRegions
[r
].srcOffsets
[0], src_extent
,
432 dest_image
, &dest_iview
,
433 dest_offset
, dest_extent
,
437 meta_finish_blit(cmd_buffer
, &saved_state
);
441 anv_device_finish_meta_blit_state(struct anv_device
*device
)
443 anv_DestroyRenderPass(anv_device_to_handle(device
),
444 device
->meta_state
.blit
.render_pass
,
445 &device
->meta_state
.alloc
);
446 anv_DestroyPipeline(anv_device_to_handle(device
),
447 device
->meta_state
.blit
.pipeline_1d_src
,
448 &device
->meta_state
.alloc
);
449 anv_DestroyPipeline(anv_device_to_handle(device
),
450 device
->meta_state
.blit
.pipeline_2d_src
,
451 &device
->meta_state
.alloc
);
452 anv_DestroyPipeline(anv_device_to_handle(device
),
453 device
->meta_state
.blit
.pipeline_3d_src
,
454 &device
->meta_state
.alloc
);
455 anv_DestroyPipelineLayout(anv_device_to_handle(device
),
456 device
->meta_state
.blit
.pipeline_layout
,
457 &device
->meta_state
.alloc
);
458 anv_DestroyDescriptorSetLayout(anv_device_to_handle(device
),
459 device
->meta_state
.blit
.ds_layout
,
460 &device
->meta_state
.alloc
);
464 anv_device_init_meta_blit_state(struct anv_device
*device
)
468 result
= anv_CreateRenderPass(anv_device_to_handle(device
),
469 &(VkRenderPassCreateInfo
) {
470 .sType
= VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
,
471 .attachmentCount
= 1,
472 .pAttachments
= &(VkAttachmentDescription
) {
473 .format
= VK_FORMAT_UNDEFINED
, /* Our shaders don't care */
474 .loadOp
= VK_ATTACHMENT_LOAD_OP_LOAD
,
475 .storeOp
= VK_ATTACHMENT_STORE_OP_STORE
,
476 .initialLayout
= VK_IMAGE_LAYOUT_GENERAL
,
477 .finalLayout
= VK_IMAGE_LAYOUT_GENERAL
,
480 .pSubpasses
= &(VkSubpassDescription
) {
481 .pipelineBindPoint
= VK_PIPELINE_BIND_POINT_GRAPHICS
,
482 .inputAttachmentCount
= 0,
483 .colorAttachmentCount
= 1,
484 .pColorAttachments
= &(VkAttachmentReference
) {
486 .layout
= VK_IMAGE_LAYOUT_GENERAL
,
488 .pResolveAttachments
= NULL
,
489 .pDepthStencilAttachment
= &(VkAttachmentReference
) {
490 .attachment
= VK_ATTACHMENT_UNUSED
,
491 .layout
= VK_IMAGE_LAYOUT_GENERAL
,
493 .preserveAttachmentCount
= 1,
494 .pPreserveAttachments
= (uint32_t[]) { 0 },
496 .dependencyCount
= 0,
497 }, &device
->meta_state
.alloc
, &device
->meta_state
.blit
.render_pass
);
498 if (result
!= VK_SUCCESS
)
501 /* We don't use a vertex shader for blitting, but instead build and pass
502 * the VUEs directly to the rasterization backend. However, we do need
503 * to provide GLSL source for the vertex shader so that the compiler
504 * does not dead-code our inputs.
506 struct anv_shader_module vs
= {
507 .nir
= build_nir_vertex_shader(),
510 struct anv_shader_module fs_1d
= {
511 .nir
= build_nir_copy_fragment_shader(GLSL_SAMPLER_DIM_1D
),
514 struct anv_shader_module fs_2d
= {
515 .nir
= build_nir_copy_fragment_shader(GLSL_SAMPLER_DIM_2D
),
518 struct anv_shader_module fs_3d
= {
519 .nir
= build_nir_copy_fragment_shader(GLSL_SAMPLER_DIM_3D
),
522 VkPipelineVertexInputStateCreateInfo vi_create_info
= {
523 .sType
= VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO
,
524 .vertexBindingDescriptionCount
= 2,
525 .pVertexBindingDescriptions
= (VkVertexInputBindingDescription
[]) {
529 .inputRate
= VK_VERTEX_INPUT_RATE_INSTANCE
533 .stride
= 5 * sizeof(float),
534 .inputRate
= VK_VERTEX_INPUT_RATE_VERTEX
537 .vertexAttributeDescriptionCount
= 3,
538 .pVertexAttributeDescriptions
= (VkVertexInputAttributeDescription
[]) {
543 .format
= VK_FORMAT_R32G32B32A32_UINT
,
550 .format
= VK_FORMAT_R32G32_SFLOAT
,
554 /* Texture Coordinate */
557 .format
= VK_FORMAT_R32G32B32_SFLOAT
,
563 VkDescriptorSetLayoutCreateInfo ds_layout_info
= {
564 .sType
= VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
,
566 .pBindings
= (VkDescriptorSetLayoutBinding
[]) {
569 .descriptorType
= VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
,
570 .descriptorCount
= 1,
571 .stageFlags
= VK_SHADER_STAGE_FRAGMENT_BIT
,
572 .pImmutableSamplers
= NULL
576 result
= anv_CreateDescriptorSetLayout(anv_device_to_handle(device
),
578 &device
->meta_state
.alloc
,
579 &device
->meta_state
.blit
.ds_layout
);
580 if (result
!= VK_SUCCESS
)
581 goto fail_render_pass
;
583 result
= anv_CreatePipelineLayout(anv_device_to_handle(device
),
584 &(VkPipelineLayoutCreateInfo
) {
585 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
587 .pSetLayouts
= &device
->meta_state
.blit
.ds_layout
,
589 &device
->meta_state
.alloc
, &device
->meta_state
.blit
.pipeline_layout
);
590 if (result
!= VK_SUCCESS
)
591 goto fail_descriptor_set_layout
;
593 VkPipelineShaderStageCreateInfo pipeline_shader_stages
[] = {
595 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
596 .stage
= VK_SHADER_STAGE_VERTEX_BIT
,
597 .module
= anv_shader_module_to_handle(&vs
),
599 .pSpecializationInfo
= NULL
601 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
602 .stage
= VK_SHADER_STAGE_FRAGMENT_BIT
,
603 .module
= VK_NULL_HANDLE
, /* TEMPLATE VALUE! FILL ME IN! */
605 .pSpecializationInfo
= NULL
609 const VkGraphicsPipelineCreateInfo vk_pipeline_info
= {
610 .sType
= VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO
,
611 .stageCount
= ARRAY_SIZE(pipeline_shader_stages
),
612 .pStages
= pipeline_shader_stages
,
613 .pVertexInputState
= &vi_create_info
,
614 .pInputAssemblyState
= &(VkPipelineInputAssemblyStateCreateInfo
) {
615 .sType
= VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO
,
616 .topology
= VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
,
617 .primitiveRestartEnable
= false,
619 .pViewportState
= &(VkPipelineViewportStateCreateInfo
) {
620 .sType
= VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO
,
624 .pRasterizationState
= &(VkPipelineRasterizationStateCreateInfo
) {
625 .sType
= VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO
,
626 .rasterizerDiscardEnable
= false,
627 .polygonMode
= VK_POLYGON_MODE_FILL
,
628 .cullMode
= VK_CULL_MODE_NONE
,
629 .frontFace
= VK_FRONT_FACE_COUNTER_CLOCKWISE
631 .pMultisampleState
= &(VkPipelineMultisampleStateCreateInfo
) {
632 .sType
= VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO
,
633 .rasterizationSamples
= 1,
634 .sampleShadingEnable
= false,
635 .pSampleMask
= (VkSampleMask
[]) { UINT32_MAX
},
637 .pColorBlendState
= &(VkPipelineColorBlendStateCreateInfo
) {
638 .sType
= VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO
,
639 .attachmentCount
= 1,
640 .pAttachments
= (VkPipelineColorBlendAttachmentState
[]) {
642 VK_COLOR_COMPONENT_A_BIT
|
643 VK_COLOR_COMPONENT_R_BIT
|
644 VK_COLOR_COMPONENT_G_BIT
|
645 VK_COLOR_COMPONENT_B_BIT
},
648 .pDynamicState
= &(VkPipelineDynamicStateCreateInfo
) {
649 .sType
= VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO
,
650 .dynamicStateCount
= 9,
651 .pDynamicStates
= (VkDynamicState
[]) {
652 VK_DYNAMIC_STATE_VIEWPORT
,
653 VK_DYNAMIC_STATE_SCISSOR
,
654 VK_DYNAMIC_STATE_LINE_WIDTH
,
655 VK_DYNAMIC_STATE_DEPTH_BIAS
,
656 VK_DYNAMIC_STATE_BLEND_CONSTANTS
,
657 VK_DYNAMIC_STATE_DEPTH_BOUNDS
,
658 VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK
,
659 VK_DYNAMIC_STATE_STENCIL_WRITE_MASK
,
660 VK_DYNAMIC_STATE_STENCIL_REFERENCE
,
664 .layout
= device
->meta_state
.blit
.pipeline_layout
,
665 .renderPass
= device
->meta_state
.blit
.render_pass
,
669 const struct anv_graphics_pipeline_create_info anv_pipeline_info
= {
670 .color_attachment_count
= -1,
671 .use_repclear
= false,
676 pipeline_shader_stages
[1].module
= anv_shader_module_to_handle(&fs_1d
);
677 result
= anv_graphics_pipeline_create(anv_device_to_handle(device
),
679 &vk_pipeline_info
, &anv_pipeline_info
,
680 &device
->meta_state
.alloc
, &device
->meta_state
.blit
.pipeline_1d_src
);
681 if (result
!= VK_SUCCESS
)
682 goto fail_pipeline_layout
;
684 pipeline_shader_stages
[1].module
= anv_shader_module_to_handle(&fs_2d
);
685 result
= anv_graphics_pipeline_create(anv_device_to_handle(device
),
687 &vk_pipeline_info
, &anv_pipeline_info
,
688 &device
->meta_state
.alloc
, &device
->meta_state
.blit
.pipeline_2d_src
);
689 if (result
!= VK_SUCCESS
)
690 goto fail_pipeline_1d
;
692 pipeline_shader_stages
[1].module
= anv_shader_module_to_handle(&fs_3d
);
693 result
= anv_graphics_pipeline_create(anv_device_to_handle(device
),
695 &vk_pipeline_info
, &anv_pipeline_info
,
696 &device
->meta_state
.alloc
, &device
->meta_state
.blit
.pipeline_3d_src
);
697 if (result
!= VK_SUCCESS
)
698 goto fail_pipeline_2d
;
701 ralloc_free(fs_1d
.nir
);
702 ralloc_free(fs_2d
.nir
);
703 ralloc_free(fs_3d
.nir
);
708 anv_DestroyPipeline(anv_device_to_handle(device
),
709 device
->meta_state
.blit
.pipeline_2d_src
,
710 &device
->meta_state
.alloc
);
713 anv_DestroyPipeline(anv_device_to_handle(device
),
714 device
->meta_state
.blit
.pipeline_1d_src
,
715 &device
->meta_state
.alloc
);
717 fail_pipeline_layout
:
718 anv_DestroyPipelineLayout(anv_device_to_handle(device
),
719 device
->meta_state
.blit
.pipeline_layout
,
720 &device
->meta_state
.alloc
);
721 fail_descriptor_set_layout
:
722 anv_DestroyDescriptorSetLayout(anv_device_to_handle(device
),
723 device
->meta_state
.blit
.ds_layout
,
724 &device
->meta_state
.alloc
);
726 anv_DestroyRenderPass(anv_device_to_handle(device
),
727 device
->meta_state
.blit
.render_pass
,
728 &device
->meta_state
.alloc
);
731 ralloc_free(fs_1d
.nir
);
732 ralloc_free(fs_2d
.nir
);
733 ralloc_free(fs_3d
.nir
);