vulkan: Combine wsi and util makefiles
[mesa.git] / src / amd / vulkan / radv_cmd_buffer.c
index 75149c5ba1a522c45f9d59974738356ab30d4925..4aa5df69674b6a623dd681583c26e0873d9586ca 100644
@@ -40,7 +40,7 @@ static void radv_handle_image_transition(struct radv_cmd_buffer *cmd_buffer,
                                         VkImageLayout dst_layout,
                                         uint32_t src_family,
                                         uint32_t dst_family,
-                                        VkImageSubresourceRange range,
+                                        const VkImageSubresourceRange *range,
                                         VkImageAspectFlags pending_clears);
 
 const struct radv_dynamic_state default_dynamic_state = {
@@ -1278,7 +1278,6 @@ radv_cmd_buffer_flush_state(struct radv_cmd_buffer *cmd_buffer, bool instanced_o
        MAYBE_UNUSED unsigned cdw_max = radeon_check_space(cmd_buffer->device->ws,
                                                           cmd_buffer->cs, 4096);
 
-       cmd_buffer->no_draws = false;
        if ((cmd_buffer->state.vertex_descriptors_dirty || cmd_buffer->state.vb_dirty) &&
            cmd_buffer->state.pipeline->num_vertex_attribs) {
                unsigned vb_offset;
@@ -1404,11 +1403,67 @@ static void radv_stage_flush(struct radv_cmd_buffer *cmd_buffer,
        }
 }
 
+static void radv_src_access_flush(struct radv_cmd_buffer *cmd_buffer,
+                                 VkAccessFlags src_flags)
+{
+       enum radv_cmd_flush_bits flush_bits = 0;
+       uint32_t b;
+       for_each_bit(b, src_flags) {
+               switch ((VkAccessFlagBits)(1 << b)) {
+               case VK_ACCESS_SHADER_WRITE_BIT:
+                       flush_bits |= RADV_CMD_FLAG_INV_GLOBAL_L2;
+                       break;
+               case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:
+                       flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
+                       break;
+               case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
+                       flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_DB;
+                       break;
+               case VK_ACCESS_TRANSFER_WRITE_BIT:
+                       flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
+                       break;
+               default:
+                       break;
+               }
+       }
+       cmd_buffer->state.flush_bits |= flush_bits;
+}
+
+static void radv_dst_access_flush(struct radv_cmd_buffer *cmd_buffer,
+                                 VkAccessFlags dst_flags)
+{
+       enum radv_cmd_flush_bits flush_bits = 0;
+       uint32_t b;
+       for_each_bit(b, dst_flags) {
+               switch ((VkAccessFlagBits)(1 << b)) {
+               case VK_ACCESS_INDIRECT_COMMAND_READ_BIT:
+               case VK_ACCESS_INDEX_READ_BIT:
+               case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:
+                       flush_bits |= RADV_CMD_FLAG_INV_VMEM_L1;
+                       break;
+               case VK_ACCESS_UNIFORM_READ_BIT:
+                       flush_bits |= RADV_CMD_FLAG_INV_VMEM_L1 | RADV_CMD_FLAG_INV_SMEM_L1;
+                       break;
+               case VK_ACCESS_SHADER_READ_BIT:
+                       flush_bits |= RADV_CMD_FLAG_INV_GLOBAL_L2;
+                       break;
+               case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:
+               case VK_ACCESS_TRANSFER_READ_BIT:
+               case VK_ACCESS_TRANSFER_WRITE_BIT:
+               case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:
+                       flush_bits |= RADV_CMD_FLUSH_AND_INV_FRAMEBUFFER | RADV_CMD_FLAG_INV_GLOBAL_L2;
+               default:
+                       break;
+               }
+       }
+       cmd_buffer->state.flush_bits |= flush_bits;
+}
+
 static void radv_subpass_barrier(struct radv_cmd_buffer *cmd_buffer, const struct radv_subpass_barrier *barrier)
 {
+       radv_src_access_flush(cmd_buffer, barrier->src_access_mask);
        radv_stage_flush(cmd_buffer, barrier->src_stage_mask);
-
-       /* TODO: actual cache flushes */
+       radv_dst_access_flush(cmd_buffer, barrier->dst_access_mask);
 }
 
 static void radv_handle_subpass_image_transition(struct radv_cmd_buffer *cmd_buffer,
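The two helpers above factor the access-mask handling out of radv_CmdPipelineBarrier so the same mapping from VkAccessFlags bits to radv flush bits can be reused by the subpass barrier path. For reference, for_each_bit walks every set bit of the mask and yields its index; a minimal sketch of that idiom (an illustration only, not the actual Mesa util macro, with a hypothetical visit callback) is:

/* Sketch of the for_each_bit idiom: visit the index of every set bit in a
 * flag mask by repeatedly stripping the lowest set bit.  Not the real
 * util/bitscan.h macro, just the underlying pattern. */
#include <stdint.h>

static void visit_set_bits(uint32_t mask, void (*visit)(uint32_t bit))
{
        while (mask) {
                uint32_t b = __builtin_ctz(mask);   /* index of lowest set bit */
                visit(b);                           /* caller switches on (1u << b) */
                mask &= mask - 1;                   /* clear that bit */
        }
}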
@@ -1426,7 +1481,7 @@ static void radv_handle_subpass_image_transition(struct radv_cmd_buffer *cmd_buf
        radv_handle_image_transition(cmd_buffer,
                                     view->image,
                                     cmd_buffer->state.attachments[idx].current_layout,
-                                    att.layout, 0, 0, range,
+                                    att.layout, 0, 0, &range,
                                     cmd_buffer->state.attachments[idx].pending_clear_aspects);
 
        cmd_buffer->state.attachments[idx].current_layout = att.layout;
@@ -1600,7 +1655,6 @@ static void  radv_reset_cmd_buffer(struct radv_cmd_buffer *cmd_buffer)
        cmd_buffer->record_fail = false;
 
        cmd_buffer->ring_offsets_idx = -1;
-       cmd_buffer->no_draws = true;
 }
 
 VkResult radv_ResetCommandBuffer(
@@ -1639,26 +1693,11 @@ VkResult radv_BeginCommandBuffer(
        if (cmd_buffer->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
                switch (cmd_buffer->queue_family_index) {
                case RADV_QUEUE_GENERAL:
-                       /* Flush read caches at the beginning of CS not flushed by the kernel. */
-                       cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_INV_ICACHE |
-                               RADV_CMD_FLAG_PS_PARTIAL_FLUSH |
-                               RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
-                               RADV_CMD_FLAG_INV_VMEM_L1 |
-                               RADV_CMD_FLAG_INV_SMEM_L1 |
-                               RADV_CMD_FLUSH_AND_INV_FRAMEBUFFER |
-                               RADV_CMD_FLAG_INV_GLOBAL_L2;
                        emit_gfx_buffer_state(cmd_buffer);
                        radv_set_db_count_control(cmd_buffer);
-                       si_emit_cache_flush(cmd_buffer);
                        break;
                case RADV_QUEUE_COMPUTE:
-                       cmd_buffer->state.flush_bits = RADV_CMD_FLAG_INV_ICACHE |
-                               RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
-                               RADV_CMD_FLAG_INV_VMEM_L1 |
-                               RADV_CMD_FLAG_INV_SMEM_L1 |
-                               RADV_CMD_FLAG_INV_GLOBAL_L2;
                        si_init_compute(cmd_buffer);
-                       si_emit_cache_flush(cmd_buffer);
                        break;
                case RADV_QUEUE_TRANSFER:
                default:
@@ -1900,7 +1939,7 @@ void radv_CmdBindPipeline(
                        if (cmd_buffer->ring_offsets_idx == -1)
                                cmd_buffer->ring_offsets_idx = loc->sgpr_idx;
                        else if (loc->sgpr_idx != -1)
-                               assert(loc->sgpr_idx != cmd_buffer->ring_offsets_idx);
+                               assert(loc->sgpr_idx == cmd_buffer->ring_offsets_idx);
                }
                break;
        default:
@@ -2447,7 +2486,6 @@ void radv_CmdDrawIndexedIndirectCountAMD(
 static void
 radv_flush_compute_state(struct radv_cmd_buffer *cmd_buffer)
 {
-       cmd_buffer->no_draws = false;
        radv_emit_compute_pipeline(cmd_buffer);
        radv_flush_descriptors(cmd_buffer, cmd_buffer->state.compute_pipeline,
                               VK_SHADER_STAGE_COMPUTE_BIT);
@@ -2641,7 +2679,7 @@ static void radv_handle_depth_image_transition(struct radv_cmd_buffer *cmd_buffe
                                               struct radv_image *image,
                                               VkImageLayout src_layout,
                                               VkImageLayout dst_layout,
-                                              VkImageSubresourceRange range,
+                                              const VkImageSubresourceRange *range,
                                               VkImageAspectFlags pending_clears)
 {
        if (dst_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL &&
@@ -2662,12 +2700,12 @@ static void radv_handle_depth_image_transition(struct radv_cmd_buffer *cmd_buffe
                    !radv_layout_has_htile(image, dst_layout)) ||
                   (radv_layout_is_htile_compressed(image, src_layout) &&
                    !radv_layout_is_htile_compressed(image, dst_layout))) {
+               VkImageSubresourceRange local_range = *range;
+               local_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+               local_range.baseMipLevel = 0;
+               local_range.levelCount = 1;
 
-               range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
-               range.baseMipLevel = 0;
-               range.levelCount = 1;
-
-               radv_decompress_depth_image_inplace(cmd_buffer, image, &range);
+               radv_decompress_depth_image_inplace(cmd_buffer, image, &local_range);
        }
 }
 
@@ -2692,7 +2730,7 @@ static void radv_handle_cmask_image_transition(struct radv_cmd_buffer *cmd_buffe
                                               VkImageLayout dst_layout,
                                               unsigned src_queue_mask,
                                               unsigned dst_queue_mask,
-                                              VkImageSubresourceRange range,
+                                              const VkImageSubresourceRange *range,
                                               VkImageAspectFlags pending_clears)
 {
        if (src_layout == VK_IMAGE_LAYOUT_UNDEFINED) {
@@ -2702,7 +2740,7 @@ static void radv_handle_cmask_image_transition(struct radv_cmd_buffer *cmd_buffe
                        radv_initialise_cmask(cmd_buffer, image, 0xffffffffu);
        } else if (radv_layout_can_fast_clear(image, src_layout, src_queue_mask) &&
                   !radv_layout_can_fast_clear(image, dst_layout, dst_queue_mask)) {
-               radv_fast_clear_flush_image_inplace(cmd_buffer, image);
+               radv_fast_clear_flush_image_inplace(cmd_buffer, image, range);
        }
 }
 
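The fast-clear eliminate now receives the barrier's subresource range instead of implicitly covering the whole image. The updated declaration is not part of this hunk; inferred from the call sites here, it is assumed to be:

/* Assumed prototype (declared elsewhere, e.g. in radv_private.h, and
 * implemented in the meta fast-clear code); inferred from the call sites
 * in this patch, not shown by this diff. */
void radv_fast_clear_flush_image_inplace(struct radv_cmd_buffer *cmd_buffer,
                                         struct radv_image *image,
                                         const VkImageSubresourceRange *subresourceRange);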
@@ -2729,14 +2767,14 @@ static void radv_handle_dcc_image_transition(struct radv_cmd_buffer *cmd_buffer,
                                             VkImageLayout dst_layout,
                                             unsigned src_queue_mask,
                                             unsigned dst_queue_mask,
-                                            VkImageSubresourceRange range,
+                                            const VkImageSubresourceRange *range,
                                             VkImageAspectFlags pending_clears)
 {
        if (src_layout == VK_IMAGE_LAYOUT_UNDEFINED) {
                radv_initialize_dcc(cmd_buffer, image, 0x20202020u);
        } else if (radv_layout_can_fast_clear(image, src_layout, src_queue_mask) &&
                   !radv_layout_can_fast_clear(image, dst_layout, dst_queue_mask)) {
-               radv_fast_clear_flush_image_inplace(cmd_buffer, image);
+               radv_fast_clear_flush_image_inplace(cmd_buffer, image, range);
        }
 }
 
@@ -2746,7 +2784,7 @@ static void radv_handle_image_transition(struct radv_cmd_buffer *cmd_buffer,
                                         VkImageLayout dst_layout,
                                         uint32_t src_family,
                                         uint32_t dst_family,
-                                        VkImageSubresourceRange range,
+                                        const VkImageSubresourceRange *range,
                                         VkImageAspectFlags pending_clears)
 {
        if (image->exclusive && src_family != dst_family) {
@@ -2801,7 +2839,7 @@ void radv_CmdPipelineBarrier(
        RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
        VkAccessFlags src_flags = 0;
        VkAccessFlags dst_flags = 0;
-       uint32_t b;
+
        for (uint32_t i = 0; i < memoryBarrierCount; i++) {
                src_flags |= pMemoryBarriers[i].srcAccessMask;
                dst_flags |= pMemoryBarriers[i].dstAccessMask;
@@ -2817,26 +2855,7 @@ void radv_CmdPipelineBarrier(
                dst_flags |= pImageMemoryBarriers[i].dstAccessMask;
        }
 
-       enum radv_cmd_flush_bits flush_bits = 0;
-       for_each_bit(b, src_flags) {
-               switch ((VkAccessFlagBits)(1 << b)) {
-               case VK_ACCESS_SHADER_WRITE_BIT:
-                       flush_bits |= RADV_CMD_FLAG_INV_GLOBAL_L2;
-                       break;
-               case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:
-                       flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
-                       break;
-               case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:
-                       flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_DB;
-                       break;
-               case VK_ACCESS_TRANSFER_WRITE_BIT:
-                       flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
-                       break;
-               default:
-                       break;
-               }
-       }
-       cmd_buffer->state.flush_bits |= flush_bits;
+       radv_src_access_flush(cmd_buffer, src_flags);
 
        for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
                RADV_FROM_HANDLE(radv_image, image, pImageMemoryBarriers[i].image);
@@ -2845,36 +2864,14 @@ void radv_CmdPipelineBarrier(
                                             pImageMemoryBarriers[i].newLayout,
                                             pImageMemoryBarriers[i].srcQueueFamilyIndex,
                                             pImageMemoryBarriers[i].dstQueueFamilyIndex,
-                                            pImageMemoryBarriers[i].subresourceRange,
+                                            &pImageMemoryBarriers[i].subresourceRange,
                                             0);
        }
 
-       flush_bits = 0;
-
-       for_each_bit(b, dst_flags) {
-               switch ((VkAccessFlagBits)(1 << b)) {
-               case VK_ACCESS_INDIRECT_COMMAND_READ_BIT:
-               case VK_ACCESS_INDEX_READ_BIT:
-               case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:
-                       flush_bits |= RADV_CMD_FLAG_INV_VMEM_L1;
-                       break;
-               case VK_ACCESS_UNIFORM_READ_BIT:
-                       flush_bits |= RADV_CMD_FLAG_INV_VMEM_L1 | RADV_CMD_FLAG_INV_SMEM_L1;
-                       break;
-               case VK_ACCESS_SHADER_READ_BIT:
-                       flush_bits |= RADV_CMD_FLAG_INV_GLOBAL_L2;
-                       break;
-               case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:
-               case VK_ACCESS_TRANSFER_READ_BIT:
-               case VK_ACCESS_TRANSFER_WRITE_BIT:
-               case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:
-                       flush_bits |= RADV_CMD_FLUSH_AND_INV_FRAMEBUFFER | RADV_CMD_FLAG_INV_GLOBAL_L2;
-               default:
-                       break;
-               }
-       }
+       radv_dst_access_flush(cmd_buffer, dst_flags);
 
-       flush_bits |= RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
+       /* TODO reduce this */
+       enum radv_cmd_flush_bits flush_bits = RADV_CMD_FLAG_CS_PARTIAL_FLUSH |
                RADV_CMD_FLAG_PS_PARTIAL_FLUSH;
 
        cmd_buffer->state.flush_bits |= flush_bits;
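With the helpers in place, the per-bit cache handling in radv_CmdPipelineBarrier collapses to the two calls above plus an unconditional CS/PS partial flush. As an application-side illustration (standard Vulkan 1.0 usage, not code from this patch), a color-attachment-write to shader-read barrier would now pick up RADV_CMD_FLAG_FLUSH_AND_INV_CB from the source mask and RADV_CMD_FLAG_INV_GLOBAL_L2 from the destination mask:

/* Example client barrier (not from this patch): render-target write followed
 * by a sampled read in a fragment shader.  'cmd' is an application-owned
 * VkCommandBuffer handle. */
VkMemoryBarrier barrier = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
        .srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .dstAccessMask = VK_ACCESS_SHADER_READ_BIT,
};
vkCmdPipelineBarrier(cmd,
                     VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
                     0,            /* dependencyFlags */
                     1, &barrier,  /* memory barriers */
                     0, NULL,      /* buffer memory barriers */
                     0, NULL);     /* image memory barriers */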
@@ -2890,7 +2887,6 @@ static void write_event(struct radv_cmd_buffer *cmd_buffer,
        uint64_t va = cmd_buffer->device->ws->buffer_get_va(event->bo);
 
        cmd_buffer->device->ws->cs_add_buffer(cs, event->bo, 8);
-       cmd_buffer->no_draws = false;
 
        MAYBE_UNUSED unsigned cdw_max = radeon_check_space(cmd_buffer->device->ws, cs, 12);
 
@@ -2981,7 +2977,7 @@ void radv_CmdWaitEvents(VkCommandBuffer commandBuffer,
                                             pImageMemoryBarriers[i].newLayout,
                                             pImageMemoryBarriers[i].srcQueueFamilyIndex,
                                             pImageMemoryBarriers[i].dstQueueFamilyIndex,
-                                            pImageMemoryBarriers[i].subresourceRange,
+                                            &pImageMemoryBarriers[i].subresourceRange,
                                             0);
        }