- switch (cmd_buffer->device->info.gen) {
- case 7:
- if (cmd_buffer->device->info.is_haswell)
- return gen75_cmd_buffer_emit_state_base_address(cmd_buffer);
- else
- return gen7_cmd_buffer_emit_state_base_address(cmd_buffer);
- case 8:
- return gen8_cmd_buffer_emit_state_base_address(cmd_buffer);
- case 9:
- return gen9_cmd_buffer_emit_state_base_address(cmd_buffer);
- case 10:
- return gen10_cmd_buffer_emit_state_base_address(cmd_buffer);
- default:
- unreachable("unsupported gen\n");
- }
+ anv_genX_call(&cmd_buffer->device->info,
+ cmd_buffer_emit_state_base_address,
+ cmd_buffer);
+}
+
/* Dispatch to the hardware-generation-specific implementation of
 * cmd_buffer_mark_image_written, selected at runtime from the device's
 * gen_device_info via the anv_genX_call macro.
 *
 * NOTE(review): judging by the name, this records that @level /
 * @base_layer..@layer_count of @image were written with @aux_usage so later
 * layout transitions can resolve correctly — the actual bookkeeping lives in
 * the per-gen implementation, which is not visible here; confirm there.
 */
void
anv_cmd_buffer_mark_image_written(struct anv_cmd_buffer *cmd_buffer,
                                  const struct anv_image *image,
                                  VkImageAspectFlagBits aspect,
                                  enum isl_aux_usage aux_usage,
                                  uint32_t level,
                                  uint32_t base_layer,
                                  uint32_t layer_count)
{
   anv_genX_call(&cmd_buffer->device->info,
                 cmd_buffer_mark_image_written,
                 cmd_buffer, image, aspect, aux_usage,
                 level, base_layer, layer_count);
}
+
/* Dispatch to the hardware-generation-specific implementation of
 * cmd_emit_conditional_render_predicate for this device's generation.
 *
 * NOTE(review): presumably emits the GPU predicate state used by
 * VK_EXT_conditional_rendering; the emitted commands are in the per-gen
 * implementation, not visible in this file — verify there.
 */
void
anv_cmd_emit_conditional_render_predicate(struct anv_cmd_buffer *cmd_buffer)
{
   anv_genX_call(&cmd_buffer->device->info,
                 cmd_emit_conditional_render_predicate,
                 cmd_buffer);
}
+
/* Copy @size bytes from @src into @dst, but only when the contents differ.
 *
 * Returns true when @dst was actually modified, false when it already held
 * the same bytes as @src.  Callers use the return value as a "dirty" flag.
 */
static bool
mem_update(void *dst, const void *src, size_t size)
{
   const bool differs = memcmp(dst, src, size) != 0;

   if (differs)
      memcpy(dst, src, size);

   return differs;
}
+
+static void
+set_dirty_for_bind_map(struct anv_cmd_buffer *cmd_buffer,
+ gl_shader_stage stage,
+ const struct anv_pipeline_bind_map *map)
+{
+ if (mem_update(cmd_buffer->state.surface_sha1s[stage],
+ map->surface_sha1, sizeof(map->surface_sha1)))
+ cmd_buffer->state.descriptors_dirty |= mesa_to_vk_shader_stage(stage);
+
+ if (mem_update(cmd_buffer->state.sampler_sha1s[stage],
+ map->sampler_sha1, sizeof(map->sampler_sha1)))
+ cmd_buffer->state.descriptors_dirty |= mesa_to_vk_shader_stage(stage);
+
+ if (mem_update(cmd_buffer->state.push_sha1s[stage],
+ map->push_sha1, sizeof(map->push_sha1)))
+ cmd_buffer->state.push_constants_dirty |= mesa_to_vk_shader_stage(stage);