VkImageLayout current_layout;
};
+/* Per-pipeline-bind-point descriptor bookkeeping, split out so graphics and
+ * compute each track their own bound sets independently (previously single
+ * fields on radv_cmd_state).
+ */
+struct radv_descriptor_state {
+ struct radv_descriptor_set *sets[MAX_SETS]; /* currently bound set per slot */
+ uint32_t dirty; /* presumably a bitmask of set slots needing re-emit — confirm against flush code */
+ uint32_t valid; /* presumably a bitmask of slots holding a valid set — confirm against flush code */
+ struct radv_push_descriptor_set push_set; /* backing storage for push descriptors */
+ bool push_dirty; /* push_set changed since it was last flushed */
+};
+
struct radv_cmd_state {
/* Vertex descriptors */
bool vb_prefetch_dirty;
uint64_t vb_va;
unsigned vb_size;
- bool push_descriptors_dirty;
bool predicating;
uint32_t dirty;
enum radv_cmd_flush_bits flush_bits;
unsigned active_occlusion_queries;
float offset_scale;
- uint32_t descriptors_dirty;
- uint32_t valid_descriptors;
uint32_t trace_id;
uint32_t last_ia_multi_vgt_param;
uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];
uint32_t dynamic_buffers[4 * MAX_DYNAMIC_BUFFERS];
VkShaderStageFlags push_constant_stages;
- struct radv_push_descriptor_set push_descriptors;
struct radv_descriptor_set meta_push_descriptors;
- struct radv_descriptor_set *descriptors[MAX_SETS];
+
+ struct radv_descriptor_state descriptors[VK_PIPELINE_BIND_POINT_RANGE_SIZE];
struct radv_cmd_buffer_upload upload;
struct radv_device_memory *memory,
int *pFD);
+/* Returns the descriptor tracking state for the given pipeline bind point.
+ * Only graphics and compute are supported; the enum values (GRAPHICS = 0,
+ * COMPUTE = 1 per the Vulkan spec) index the descriptors[] array directly.
+ * NOTE(review): this reads cmd_buffer->descriptors, while another hunk appears
+ * to add a same-named array to radv_cmd_state — verify the array's actual
+ * owner; if it lives in radv_cmd_state this needs cmd_buffer->state.descriptors.
+ */
+static inline struct radv_descriptor_state *
+radv_get_descriptors_state(struct radv_cmd_buffer *cmd_buffer,
+ VkPipelineBindPoint bind_point)
+{
+ assert(bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS ||
+ bind_point == VK_PIPELINE_BIND_POINT_COMPUTE);
+ return &cmd_buffer->descriptors[bind_point];
+}
+
/*
* Takes x,y,z as exact numbers of invocations, instead of blocks.
*
void radv_free_sem_info(struct radv_winsys_sem_info *sem_info);
void radv_set_descriptor_set(struct radv_cmd_buffer *cmd_buffer,
+ VkPipelineBindPoint bind_point,
struct radv_descriptor_set *set,
unsigned idx);