      cb_color_info &= C_028C70_DCC_ENABLE;
   }
+   if (!radv_layout_can_fast_clear(image, layout, in_render_loop,
+                                   radv_image_queue_family_mask(image,
+                                                                cmd_buffer->queue_family_index,
+                                                                cmd_buffer->queue_family_index))) {
+      cb_color_info &= C_028C70_COMPRESSION;
+   }
+
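/* Illustrative sketch of the S_/C_ field-mask convention behind the
 * C_028C70_COMPRESSION and S_028C70_COMPRESSION macros used in this hunk.
 * The register name and bit position below are placeholders, not the real
 * CB_COLOR0_INFO layout: S_<REG>_<FIELD>(x) shifts a value into the field,
 * and C_<REG>_<FIELD> is its complement mask, so `v &= C_...` disables the
 * field and `v |= S_...(1)` enables it.
 */
#include <assert.h>
#include <stdint.h>

#define S_EXAMPLE_COMPRESSION(x) (((uint32_t)(x) & 0x1) << 14) /* placeholder bit position */
#define C_EXAMPLE_COMPRESSION    0xFFFFBFFF                    /* ~(1u << 14) */

int main(void)
{
   uint32_t info = S_EXAMPLE_COMPRESSION(1); /* field starts enabled */

   info &= C_EXAMPLE_COMPRESSION;            /* disable, like `cb_color_info &= C_028C70_COMPRESSION` */
   assert(!(info & S_EXAMPLE_COMPRESSION(1)));

   info |= S_EXAMPLE_COMPRESSION(1);         /* re-enable, like `cb_color_info |= S_028C70_COMPRESSION(1)` */
   assert(info & S_EXAMPLE_COMPRESSION(1));
   return 0;
}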
   if (radv_image_is_tc_compat_cmask(image) &&
       (radv_is_fmask_decompress_pipeline(cmd_buffer) ||
        radv_is_dcc_decompress_pipeline(cmd_buffer))) {
      cb_color_info &= C_028C70_FMASK_COMPRESS_1FRAG_ONLY;
   }
+   if (radv_image_has_fmask(image) &&
+       (radv_is_fmask_decompress_pipeline(cmd_buffer) ||
+        radv_is_hw_resolve_pipeline(cmd_buffer))) {
+      /* Make sure FMASK is enabled if it has been cleared because:
+       *
+       * 1) it's required for FMASK_DECOMPRESS operations to avoid
+       * GPU hangs
+       * 2) it's necessary for CB_RESOLVE which can read compressed
+       * FMASK data anyways.
+       */
+      cb_color_info |= S_028C70_COMPRESSION(1);
+   }
+
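/* Standalone sketch of how the two checks added above combine for the
 * COMPRESSION field, assuming the same ordering as in the hunk: the layout
 * check may clear the bit, then the FMASK-decompress / hw-resolve check
 * re-enables it. All names below are illustrative stand-ins, not RADV API.
 */
#include <stdbool.h>
#include <stdio.h>

static bool example_compression_enabled(bool layout_can_fast_clear,
                                        bool has_fmask,
                                        bool fmask_decompress_or_hw_resolve)
{
   bool compression = true; /* assume the surface was set up compressed */

   if (!layout_can_fast_clear)
      compression = false; /* cb_color_info &= C_028C70_COMPRESSION */

   if (has_fmask && fmask_decompress_or_hw_resolve)
      compression = true; /* cb_color_info |= S_028C70_COMPRESSION(1) */

   return compression;
}

int main(void)
{
   /* An FMASK image drawn by the decompress/resolve pipelines keeps
    * compression enabled even when the layout alone would not allow it. */
   printf("%d\n", example_compression_enabled(false, true, true));  /* 1 */
   /* Any other draw in such a layout leaves it disabled. */
   printf("%d\n", example_compression_enabled(false, true, false)); /* 0 */
   return 0;
}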
   if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX10) {
      radeon_set_context_reg_seq(cmd_buffer->cs, R_028C60_CB_COLOR0_BASE + index * 0x3c, 11);
      radeon_emit(cmd_buffer->cs, cb->cb_color_base);
@@ ... @@ radv_is_dcc_decompress_pipeline(struct radv_cmd_buffer *cmd_buffer)
   return radv_pipeline_to_handle(pipeline) ==
          meta_state->fast_clear_flush.dcc_decompress_pipeline;
}
+/**
+ * Return whether the bound pipeline is the hardware resolve path.
+ */
+static inline bool
+radv_is_hw_resolve_pipeline(struct radv_cmd_buffer *cmd_buffer)
+{
+   struct radv_meta_state *meta_state = &cmd_buffer->device->meta_state;
+   struct radv_pipeline *pipeline = cmd_buffer->state.pipeline;
+
+   for (uint32_t i = 0; i < NUM_META_FS_KEYS; ++i) {
+      VkFormat format = radv_fs_key_format_exemplars[i];
+      unsigned fs_key = radv_format_meta_fs_key(format);
+
+      if (radv_pipeline_to_handle(pipeline) == meta_state->resolve.pipeline[fs_key])
+         return true;
+   }
+   return false;
+}
+
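/* Hedged sketch of the lookup pattern used by radv_is_hw_resolve_pipeline()
 * above: the resolve meta pipelines are stored one per fragment-shader key
 * (one per exemplar format), so the check compares the bound pipeline handle
 * against every slot. The types and values below are mock stand-ins, not
 * RADV structures.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define EXAMPLE_NUM_FS_KEYS 4 /* stands in for NUM_META_FS_KEYS */

struct example_meta_state {
   uint64_t resolve_pipeline[EXAMPLE_NUM_FS_KEYS]; /* one handle per fs key */
};

static bool example_is_resolve_pipeline(const struct example_meta_state *state,
                                        uint64_t bound_pipeline)
{
   for (uint32_t i = 0; i < EXAMPLE_NUM_FS_KEYS; ++i) {
      if (bound_pipeline == state->resolve_pipeline[i])
         return true;
   }
   return false;
}

int main(void)
{
   struct example_meta_state state = { .resolve_pipeline = { 10, 11, 12, 13 } };

   printf("%d\n", example_is_resolve_pipeline(&state, 12)); /* 1: a resolve pipeline is bound */
   printf("%d\n", example_is_resolve_pipeline(&state, 99)); /* 0: some other pipeline is bound */
   return 0;
}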
@@ ... @@
/* common nir builder helpers */
#include "nir/nir_builder.h"