/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
27 #include "radv_meta.h"
28 #include "radv_private.h"
/* Which aspects of the depth/stencil surface a decompress pipeline flushes.
 * DECOMPRESS_DEPTH and DECOMPRESS_STENCIL are required: both values are
 * referenced by radv_get_depth_pipeline() below, and the enum doubles as an
 * index into depth_decomp[].decompress_pipeline[]. */
enum radv_depth_decompress {
	DECOMPRESS_DEPTH_STENCIL,
	DECOMPRESS_DEPTH,
	DECOMPRESS_STENCIL,
};
43 create_pass(struct radv_device
*device
,
48 VkDevice device_h
= radv_device_to_handle(device
);
49 const VkAllocationCallbacks
*alloc
= &device
->meta_state
.alloc
;
50 VkAttachmentDescription attachment
;
53 attachment
.format
= VK_FORMAT_D32_SFLOAT_S8_UINT
;
54 attachment
.samples
= samples
;
55 attachment
.loadOp
= VK_ATTACHMENT_LOAD_OP_LOAD
;
56 attachment
.storeOp
= VK_ATTACHMENT_STORE_OP_STORE
;
57 attachment
.stencilLoadOp
= VK_ATTACHMENT_LOAD_OP_LOAD
;
58 attachment
.stencilStoreOp
= VK_ATTACHMENT_STORE_OP_STORE
;
59 attachment
.initialLayout
= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
;
60 attachment
.finalLayout
= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
;
62 result
= radv_CreateRenderPass(device_h
,
63 &(VkRenderPassCreateInfo
) {
64 .sType
= VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO
,
66 .pAttachments
= &attachment
,
68 .pSubpasses
= &(VkSubpassDescription
) {
69 .pipelineBindPoint
= VK_PIPELINE_BIND_POINT_GRAPHICS
,
70 .inputAttachmentCount
= 0,
71 .colorAttachmentCount
= 0,
72 .pColorAttachments
= NULL
,
73 .pResolveAttachments
= NULL
,
74 .pDepthStencilAttachment
= &(VkAttachmentReference
) {
76 .layout
= VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
,
78 .preserveAttachmentCount
= 0,
79 .pPreserveAttachments
= NULL
,
82 .pDependencies
= (VkSubpassDependency
[]) {
84 .srcSubpass
= VK_SUBPASS_EXTERNAL
,
86 .srcStageMask
= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
,
87 .dstStageMask
= VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
,
94 .dstSubpass
= VK_SUBPASS_EXTERNAL
,
95 .srcStageMask
= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
,
96 .dstStageMask
= VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
,
110 create_pipeline_layout(struct radv_device
*device
, VkPipelineLayout
*layout
)
112 VkPipelineLayoutCreateInfo pl_create_info
= {
113 .sType
= VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
,
116 .pushConstantRangeCount
= 0,
117 .pPushConstantRanges
= NULL
,
120 return radv_CreatePipelineLayout(radv_device_to_handle(device
),
122 &device
->meta_state
.alloc
,
127 create_pipeline(struct radv_device
*device
,
128 VkShaderModule vs_module_h
,
131 VkPipelineLayout layout
,
132 enum radv_depth_op op
,
133 enum radv_depth_decompress decompress
,
134 VkPipeline
*pipeline
)
137 VkDevice device_h
= radv_device_to_handle(device
);
138 struct radv_shader_module vs_module
= {0};
140 mtx_lock(&device
->meta_state
.mtx
);
142 mtx_unlock(&device
->meta_state
.mtx
);
147 vs_module
.nir
= radv_meta_build_nir_vs_generate_vertices();
148 vs_module_h
= radv_shader_module_to_handle(&vs_module
);
151 struct radv_shader_module fs_module
= {
152 .nir
= radv_meta_build_nir_fs_noop(),
155 if (!fs_module
.nir
) {
156 /* XXX: Need more accurate error */
157 result
= VK_ERROR_OUT_OF_HOST_MEMORY
;
161 const VkPipelineSampleLocationsStateCreateInfoEXT sample_locs_create_info
= {
162 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT
,
163 .sampleLocationsEnable
= false,
166 const VkGraphicsPipelineCreateInfo pipeline_create_info
= {
167 .sType
= VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO
,
169 .pStages
= (VkPipelineShaderStageCreateInfo
[]) {
171 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
172 .stage
= VK_SHADER_STAGE_VERTEX_BIT
,
173 .module
= vs_module_h
,
177 .sType
= VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO
,
178 .stage
= VK_SHADER_STAGE_FRAGMENT_BIT
,
179 .module
= radv_shader_module_to_handle(&fs_module
),
183 .pVertexInputState
= &(VkPipelineVertexInputStateCreateInfo
) {
184 .sType
= VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO
,
185 .vertexBindingDescriptionCount
= 0,
186 .vertexAttributeDescriptionCount
= 0,
188 .pInputAssemblyState
= &(VkPipelineInputAssemblyStateCreateInfo
) {
189 .sType
= VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO
,
190 .topology
= VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
,
191 .primitiveRestartEnable
= false,
193 .pViewportState
= &(VkPipelineViewportStateCreateInfo
) {
194 .sType
= VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO
,
198 .pRasterizationState
= &(VkPipelineRasterizationStateCreateInfo
) {
199 .sType
= VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO
,
200 .depthClampEnable
= false,
201 .rasterizerDiscardEnable
= false,
202 .polygonMode
= VK_POLYGON_MODE_FILL
,
203 .cullMode
= VK_CULL_MODE_NONE
,
204 .frontFace
= VK_FRONT_FACE_COUNTER_CLOCKWISE
,
206 .pMultisampleState
= &(VkPipelineMultisampleStateCreateInfo
) {
207 .sType
= VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO
,
208 .pNext
= &sample_locs_create_info
,
209 .rasterizationSamples
= samples
,
210 .sampleShadingEnable
= false,
212 .alphaToCoverageEnable
= false,
213 .alphaToOneEnable
= false,
215 .pColorBlendState
= &(VkPipelineColorBlendStateCreateInfo
) {
216 .sType
= VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO
,
217 .logicOpEnable
= false,
218 .attachmentCount
= 0,
219 .pAttachments
= NULL
,
221 .pDepthStencilState
= &(VkPipelineDepthStencilStateCreateInfo
) {
222 .sType
= VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO
,
223 .depthTestEnable
= false,
224 .depthWriteEnable
= false,
225 .depthBoundsTestEnable
= false,
226 .stencilTestEnable
= false,
228 .pDynamicState
= &(VkPipelineDynamicStateCreateInfo
) {
229 .sType
= VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO
,
230 .dynamicStateCount
= 3,
231 .pDynamicStates
= (VkDynamicState
[]) {
232 VK_DYNAMIC_STATE_VIEWPORT
,
233 VK_DYNAMIC_STATE_SCISSOR
,
234 VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT
,
242 struct radv_graphics_pipeline_create_info extra
= {
243 .use_rectlist
= true,
244 .db_flush_depth_inplace
= decompress
== DECOMPRESS_DEPTH_STENCIL
||
245 decompress
== DECOMPRESS_DEPTH
,
246 .db_flush_stencil_inplace
= decompress
== DECOMPRESS_DEPTH_STENCIL
||
247 decompress
== DECOMPRESS_STENCIL
,
248 .db_resummarize
= op
== DEPTH_RESUMMARIZE
,
251 result
= radv_graphics_pipeline_create(device_h
,
252 radv_pipeline_cache_to_handle(&device
->meta_state
.cache
),
253 &pipeline_create_info
, &extra
,
254 &device
->meta_state
.alloc
,
258 ralloc_free(fs_module
.nir
);
260 ralloc_free(vs_module
.nir
);
261 mtx_unlock(&device
->meta_state
.mtx
);
266 radv_device_finish_meta_depth_decomp_state(struct radv_device
*device
)
268 struct radv_meta_state
*state
= &device
->meta_state
;
270 for (uint32_t i
= 0; i
< ARRAY_SIZE(state
->depth_decomp
); ++i
) {
271 radv_DestroyRenderPass(radv_device_to_handle(device
),
272 state
->depth_decomp
[i
].pass
,
274 radv_DestroyPipelineLayout(radv_device_to_handle(device
),
275 state
->depth_decomp
[i
].p_layout
,
278 for (uint32_t j
= 0; j
< NUM_DEPTH_DECOMPRESS_PIPELINES
; j
++) {
279 radv_DestroyPipeline(radv_device_to_handle(device
),
280 state
->depth_decomp
[i
].decompress_pipeline
[j
],
283 radv_DestroyPipeline(radv_device_to_handle(device
),
284 state
->depth_decomp
[i
].resummarize_pipeline
,
290 radv_device_init_meta_depth_decomp_state(struct radv_device
*device
, bool on_demand
)
292 struct radv_meta_state
*state
= &device
->meta_state
;
293 VkResult res
= VK_SUCCESS
;
295 struct radv_shader_module vs_module
= { .nir
= radv_meta_build_nir_vs_generate_vertices() };
296 if (!vs_module
.nir
) {
297 /* XXX: Need more accurate error */
298 res
= VK_ERROR_OUT_OF_HOST_MEMORY
;
302 VkShaderModule vs_module_h
= radv_shader_module_to_handle(&vs_module
);
304 for (uint32_t i
= 0; i
< ARRAY_SIZE(state
->depth_decomp
); ++i
) {
305 uint32_t samples
= 1 << i
;
307 res
= create_pass(device
, samples
, &state
->depth_decomp
[i
].pass
);
308 if (res
!= VK_SUCCESS
)
311 res
= create_pipeline_layout(device
,
312 &state
->depth_decomp
[i
].p_layout
);
313 if (res
!= VK_SUCCESS
)
319 for (uint32_t j
= 0; j
< NUM_DEPTH_DECOMPRESS_PIPELINES
; j
++) {
320 res
= create_pipeline(device
, vs_module_h
, samples
,
321 state
->depth_decomp
[i
].pass
,
322 state
->depth_decomp
[i
].p_layout
,
325 &state
->depth_decomp
[i
].decompress_pipeline
[j
]);
326 if (res
!= VK_SUCCESS
)
330 res
= create_pipeline(device
, vs_module_h
, samples
,
331 state
->depth_decomp
[i
].pass
,
332 state
->depth_decomp
[i
].p_layout
,
335 &state
->depth_decomp
[i
].resummarize_pipeline
);
336 if (res
!= VK_SUCCESS
)
343 radv_device_finish_meta_depth_decomp_state(device
);
346 ralloc_free(vs_module
.nir
);
352 radv_get_depth_pipeline(struct radv_cmd_buffer
*cmd_buffer
,
353 struct radv_image
*image
,
354 const VkImageSubresourceRange
*subresourceRange
,
355 enum radv_depth_op op
)
357 struct radv_meta_state
*state
= &cmd_buffer
->device
->meta_state
;
358 uint32_t samples
= image
->info
.samples
;
359 uint32_t samples_log2
= ffs(samples
) - 1;
360 enum radv_depth_decompress decompress
;
361 VkPipeline
*pipeline
;
363 if (subresourceRange
->aspectMask
== VK_IMAGE_ASPECT_DEPTH_BIT
) {
364 decompress
= DECOMPRESS_DEPTH
;
365 } else if (subresourceRange
->aspectMask
== VK_IMAGE_ASPECT_STENCIL_BIT
) {
366 decompress
= DECOMPRESS_STENCIL
;
368 decompress
= DECOMPRESS_DEPTH_STENCIL
;
371 if (!state
->depth_decomp
[samples_log2
].decompress_pipeline
[decompress
]) {
374 for (uint32_t i
= 0; i
< NUM_DEPTH_DECOMPRESS_PIPELINES
; i
++) {
375 ret
= create_pipeline(cmd_buffer
->device
, VK_NULL_HANDLE
, samples
,
376 state
->depth_decomp
[samples_log2
].pass
,
377 state
->depth_decomp
[samples_log2
].p_layout
,
380 &state
->depth_decomp
[samples_log2
].decompress_pipeline
[i
]);
381 if (ret
!= VK_SUCCESS
) {
382 cmd_buffer
->record_result
= ret
;
387 ret
= create_pipeline(cmd_buffer
->device
, VK_NULL_HANDLE
, samples
,
388 state
->depth_decomp
[samples_log2
].pass
,
389 state
->depth_decomp
[samples_log2
].p_layout
,
392 &state
->depth_decomp
[samples_log2
].resummarize_pipeline
);
393 if (ret
!= VK_SUCCESS
) {
394 cmd_buffer
->record_result
= ret
;
400 case DEPTH_DECOMPRESS
:
401 pipeline
= &state
->depth_decomp
[samples_log2
].decompress_pipeline
[decompress
];
403 case DEPTH_RESUMMARIZE
:
404 pipeline
= &state
->depth_decomp
[samples_log2
].resummarize_pipeline
;
407 unreachable("unknown operation");
414 radv_process_depth_image_layer(struct radv_cmd_buffer
*cmd_buffer
,
415 struct radv_image
*image
,
416 const VkImageSubresourceRange
*range
,
417 int level
, int layer
)
419 struct radv_device
*device
= cmd_buffer
->device
;
420 struct radv_meta_state
*state
= &device
->meta_state
;
421 uint32_t samples_log2
= ffs(image
->info
.samples
) - 1;
422 struct radv_image_view iview
;
423 uint32_t width
, height
;
425 width
= radv_minify(image
->info
.width
, range
->baseMipLevel
+ level
);
426 height
= radv_minify(image
->info
.height
, range
->baseMipLevel
+ level
);
428 radv_image_view_init(&iview
, device
,
429 &(VkImageViewCreateInfo
) {
430 .sType
= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO
,
431 .image
= radv_image_to_handle(image
),
432 .viewType
= radv_meta_get_view_type(image
),
433 .format
= image
->vk_format
,
434 .subresourceRange
= {
435 .aspectMask
= VK_IMAGE_ASPECT_DEPTH_BIT
,
436 .baseMipLevel
= range
->baseMipLevel
+ level
,
438 .baseArrayLayer
= range
->baseArrayLayer
+ layer
,
445 radv_CreateFramebuffer(radv_device_to_handle(device
),
446 &(VkFramebufferCreateInfo
) {
447 .sType
= VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO
,
448 .attachmentCount
= 1,
449 .pAttachments
= (VkImageView
[]) {
450 radv_image_view_to_handle(&iview
)
455 }, &cmd_buffer
->pool
->alloc
, &fb_h
);
457 radv_cmd_buffer_begin_render_pass(cmd_buffer
,
458 &(VkRenderPassBeginInfo
) {
459 .sType
= VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
,
460 .renderPass
= state
->depth_decomp
[samples_log2
].pass
,
472 .clearValueCount
= 0,
473 .pClearValues
= NULL
,
475 radv_cmd_buffer_set_subpass(cmd_buffer
,
476 &cmd_buffer
->state
.pass
->subpasses
[0]);
478 radv_CmdDraw(radv_cmd_buffer_to_handle(cmd_buffer
), 3, 1, 0, 0);
479 radv_cmd_buffer_end_render_pass(cmd_buffer
);
481 radv_DestroyFramebuffer(radv_device_to_handle(device
), fb_h
,
482 &cmd_buffer
->pool
->alloc
);
485 static void radv_process_depth_image_inplace(struct radv_cmd_buffer
*cmd_buffer
,
486 struct radv_image
*image
,
487 const VkImageSubresourceRange
*subresourceRange
,
488 struct radv_sample_locations_state
*sample_locs
,
489 enum radv_depth_op op
)
491 struct radv_meta_saved_state saved_state
;
492 VkCommandBuffer cmd_buffer_h
= radv_cmd_buffer_to_handle(cmd_buffer
);
493 VkPipeline
*pipeline
;
495 if (!radv_image_has_htile(image
))
498 radv_meta_save(&saved_state
, cmd_buffer
,
499 RADV_META_SAVE_GRAPHICS_PIPELINE
|
500 RADV_META_SAVE_SAMPLE_LOCATIONS
|
501 RADV_META_SAVE_PASS
);
503 pipeline
= radv_get_depth_pipeline(cmd_buffer
, image
,
504 subresourceRange
, op
);
506 radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer
),
507 VK_PIPELINE_BIND_POINT_GRAPHICS
, *pipeline
);
510 assert(image
->flags
& VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT
);
512 /* Set the sample locations specified during explicit or
513 * automatic layout transitions, otherwise the depth decompress
514 * pass uses the default HW locations.
516 radv_CmdSetSampleLocationsEXT(cmd_buffer_h
, &(VkSampleLocationsInfoEXT
) {
517 .sampleLocationsPerPixel
= sample_locs
->per_pixel
,
518 .sampleLocationGridSize
= sample_locs
->grid_size
,
519 .sampleLocationsCount
= sample_locs
->count
,
520 .pSampleLocations
= sample_locs
->locations
,
524 for (uint32_t l
= 0; l
< radv_get_levelCount(image
, subresourceRange
); ++l
) {
526 radv_minify(image
->info
.width
,
527 subresourceRange
->baseMipLevel
+ l
);
529 radv_minify(image
->info
.height
,
530 subresourceRange
->baseMipLevel
+ l
);
532 radv_CmdSetViewport(cmd_buffer_h
, 0, 1,
542 radv_CmdSetScissor(cmd_buffer_h
, 0, 1,
545 .extent
= { width
, height
},
548 for (uint32_t s
= 0; s
< radv_get_layerCount(image
, subresourceRange
); s
++) {
549 radv_process_depth_image_layer(cmd_buffer
, image
,
550 subresourceRange
, l
, s
);
554 radv_meta_restore(&saved_state
, cmd_buffer
);
557 void radv_decompress_depth_image_inplace(struct radv_cmd_buffer
*cmd_buffer
,
558 struct radv_image
*image
,
559 const VkImageSubresourceRange
*subresourceRange
,
560 struct radv_sample_locations_state
*sample_locs
)
562 assert(cmd_buffer
->queue_family_index
== RADV_QUEUE_GENERAL
);
563 radv_process_depth_image_inplace(cmd_buffer
, image
, subresourceRange
,
564 sample_locs
, DEPTH_DECOMPRESS
);
567 void radv_resummarize_depth_image_inplace(struct radv_cmd_buffer
*cmd_buffer
,
568 struct radv_image
*image
,
569 const VkImageSubresourceRange
*subresourceRange
,
570 struct radv_sample_locations_state
*sample_locs
)
572 assert(cmd_buffer
->queue_family_index
== RADV_QUEUE_GENERAL
);
573 radv_process_depth_image_inplace(cmd_buffer
, image
, subresourceRange
,
574 sample_locs
, DEPTH_RESUMMARIZE
);