static void
radv_meta_save_novertex(struct radv_meta_saved_state *state,
- const struct radv_cmd_buffer *cmd_buffer)
+ const struct radv_cmd_buffer *cmd_buffer,
+ uint32_t flags)
{
- state->old_pipeline = cmd_buffer->state.pipeline;
- state->old_descriptor_set0 = cmd_buffer->state.descriptors[0];
-
- /* Save all viewports. */
- state->viewport.count = cmd_buffer->state.dynamic.viewport.count;
- typed_memcpy(state->viewport.viewports,
- cmd_buffer->state.dynamic.viewport.viewports,
- MAX_VIEWPORTS);
+ state->flags = flags;
+
+ if (state->flags & RADV_META_SAVE_GRAPHICS_PIPELINE) {
+ state->old_pipeline = cmd_buffer->state.pipeline;
+
+ /* Save all viewports. */
+ state->viewport.count = cmd_buffer->state.dynamic.viewport.count;
+ typed_memcpy(state->viewport.viewports,
+ cmd_buffer->state.dynamic.viewport.viewports,
+ MAX_VIEWPORTS);
+
+ /* Save all scissors. */
+ state->scissor.count = cmd_buffer->state.dynamic.scissor.count;
+ typed_memcpy(state->scissor.scissors,
+ cmd_buffer->state.dynamic.scissor.scissors,
+ MAX_SCISSORS);
+ }
- /* Save all scissors. */
- state->scissor.count = cmd_buffer->state.dynamic.scissor.count;
- typed_memcpy(state->scissor.scissors,
- cmd_buffer->state.dynamic.scissor.scissors,
- MAX_SCISSORS);
+ if (state->flags & RADV_META_SAVE_DESCRIPTORS) {
+ state->old_descriptor_set0 = cmd_buffer->state.descriptors[0];
+ }
- memcpy(state->push_constants, cmd_buffer->push_constants, MAX_PUSH_CONSTANTS_SIZE);
+ if (state->flags & RADV_META_SAVE_CONSTANTS) {
+ memcpy(state->push_constants, cmd_buffer->push_constants,
+ MAX_PUSH_CONSTANTS_SIZE);
+ }
}
void
radv_meta_restore(const struct radv_meta_saved_state *state,
struct radv_cmd_buffer *cmd_buffer)
{
- radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer), VK_PIPELINE_BIND_POINT_GRAPHICS,
- radv_pipeline_to_handle(state->old_pipeline));
- cmd_buffer->state.descriptors[0] = state->old_descriptor_set0;
-
- cmd_buffer->state.dirty |= RADV_CMD_DIRTY_PIPELINE;
-
- /* Restore all viewports. */
- cmd_buffer->state.dynamic.viewport.count = state->viewport.count;
- typed_memcpy(cmd_buffer->state.dynamic.viewport.viewports,
- state->viewport.viewports,
- MAX_VIEWPORTS);
-
- /* Restore all scissors. */
- cmd_buffer->state.dynamic.scissor.count = state->scissor.count;
- typed_memcpy(cmd_buffer->state.dynamic.scissor.scissors,
- state->scissor.scissors,
- MAX_SCISSORS);
+ if (state->flags & RADV_META_SAVE_GRAPHICS_PIPELINE) {
+ radv_CmdBindPipeline(radv_cmd_buffer_to_handle(cmd_buffer),
+ VK_PIPELINE_BIND_POINT_GRAPHICS,
+ radv_pipeline_to_handle(state->old_pipeline));
+
+ cmd_buffer->state.dirty |= RADV_CMD_DIRTY_PIPELINE;
+
+ /* Restore all viewports. */
+ cmd_buffer->state.dynamic.viewport.count = state->viewport.count;
+ typed_memcpy(cmd_buffer->state.dynamic.viewport.viewports,
+ state->viewport.viewports,
+ MAX_VIEWPORTS);
+
+ /* Restore all scissors. */
+ cmd_buffer->state.dynamic.scissor.count = state->scissor.count;
+ typed_memcpy(cmd_buffer->state.dynamic.scissor.scissors,
+ state->scissor.scissors,
+ MAX_SCISSORS);
+
+ cmd_buffer->state.dirty |= 1 << VK_DYNAMIC_STATE_VIEWPORT |
+ 1 << VK_DYNAMIC_STATE_SCISSOR;
+ }
- cmd_buffer->state.dirty |= 1 << VK_DYNAMIC_STATE_VIEWPORT |
- 1 << VK_DYNAMIC_STATE_SCISSOR;
+ if (state->flags & RADV_META_SAVE_DESCRIPTORS) {
+ cmd_buffer->state.descriptors[0] = state->old_descriptor_set0;
+ cmd_buffer->state.descriptors_dirty |= (1 << 0);
+ }
- memcpy(cmd_buffer->push_constants, state->push_constants, MAX_PUSH_CONSTANTS_SIZE);
- cmd_buffer->push_constant_stages |= VK_SHADER_STAGE_ALL_GRAPHICS | VK_SHADER_STAGE_COMPUTE_BIT;
+ if (state->flags & RADV_META_SAVE_CONSTANTS) {
+ memcpy(cmd_buffer->push_constants, state->push_constants,
+ MAX_PUSH_CONSTANTS_SIZE);
+ cmd_buffer->push_constant_stages |= VK_SHADER_STAGE_ALL_GRAPHICS |
+ VK_SHADER_STAGE_COMPUTE_BIT;
+ }
}
void
*/
void
radv_meta_save_graphics_reset_vport_scissor_novertex(struct radv_meta_saved_state *saved_state,
- struct radv_cmd_buffer *cmd_buffer)
+ struct radv_cmd_buffer *cmd_buffer,
+ uint32_t flags)
{
- radv_meta_save_novertex(saved_state, cmd_buffer);
+ radv_meta_save_novertex(saved_state, cmd_buffer, flags);
cmd_buffer->state.dynamic.viewport.count = 0;
cmd_buffer->state.dynamic.scissor.count = 0;
cmd_buffer->state.dirty |= 1 << VK_DYNAMIC_STATE_VIEWPORT |
extern "C" {
#endif
+enum radv_meta_save_flags {
+ RADV_META_SAVE_PASS = (1 << 0),
+ RADV_META_SAVE_CONSTANTS = (1 << 1),
+ RADV_META_SAVE_DESCRIPTORS = (1 << 2),
+ RADV_META_SAVE_GRAPHICS_PIPELINE = (1 << 3),
+};
+
struct radv_meta_saved_state {
+ uint32_t flags;
+
struct radv_descriptor_set *old_descriptor_set0;
struct radv_pipeline *old_pipeline;
struct radv_viewport_state viewport;
const VkImageSubresourceRange *subresourceRange);
void radv_meta_save_graphics_reset_vport_scissor_novertex(struct radv_meta_saved_state *saved_state,
- struct radv_cmd_buffer *cmd_buffer);
+ struct radv_cmd_buffer *cmd_buffer,
+ uint32_t flags);
void radv_meta_resolve_compute_image(struct radv_cmd_buffer *cmd_buffer,
struct radv_image *src_image,
assert(src_image->info.samples == 1);
assert(dest_image->info.samples == 1);
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
for (unsigned r = 0; r < regionCount; r++) {
const VkImageSubresourceLayers *src_res = &pRegions[r].srcSubresource;
if (!radv_subpass_needs_clear(cmd_buffer))
return;
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
uint32_t a = cmd_state->subpass->color_attachments[i].attachment;
if (cs)
radv_meta_save_compute(&saved_state.compute, cmd_buffer, 16);
else
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer, ~0);
radv_cmd_clear_image(cmd_buffer, image, imageLayout,
(const VkClearValue *) pColor,
RADV_FROM_HANDLE(radv_image, image, image_h);
struct radv_meta_saved_state saved_state;
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
radv_cmd_clear_image(cmd_buffer, image, imageLayout,
(const VkClearValue *) pDepthStencil,
if (!cmd_buffer->state.subpass)
return;
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
/* FINISHME: We can do better than this dumb loop. It thrashes too much
* state.
if (cs)
radv_meta_save_compute(&saved_state.compute, cmd_buffer, 12);
else
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer, ~0);
for (unsigned r = 0; r < regionCount; r++) {
if (cs)
radv_meta_save_compute(&saved_state.compute, cmd_buffer, 16);
else
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer, ~0);
for (unsigned r = 0; r < regionCount; r++) {
assert(pRegions[r].srcSubresource.aspectMask ==
return;
radv_meta_save_pass(&saved_pass_state, cmd_buffer);
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
switch (op) {
case DEPTH_DECOMPRESS:
assert(cmd_buffer->queue_family_index == RADV_QUEUE_GENERAL);
radv_meta_save_pass(&saved_pass_state, cmd_buffer);
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
if (image->fmask.size > 0) {
pipeline = cmd_buffer->device->meta_state.fast_clear_flush.fmask_decompress_pipeline;
return;
}
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
assert(src_image->info.samples > 1);
if (src_image->info.samples <= 1) {
return;
}
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
for (uint32_t i = 0; i < subpass->color_count; ++i) {
VkAttachmentReference src_att = subpass->color_attachments[i];
}
rp = device->meta_state.resolve_fragment.rc[samples_log2].render_pass[fs_key];
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
for (uint32_t r = 0; r < region_count; ++r) {
 const VkImageResolve *region = &regions[r];
if (!subpass->has_resolve)
return;
- radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer);
+ radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer, ~0);
for (uint32_t i = 0; i < subpass->color_count; ++i) {
VkAttachmentReference src_att = subpass->color_attachments[i];