VkDeviceSize size;
VkMemoryHeapFlags flags;
- /* Driver-internal book-keeping */
- VkDeviceSize used;
+ /** Driver-internal book-keeping.
+ *
+ * Align it to 64 bits to make atomic operations faster on 32-bit platforms.
+ */
+ VkDeviceSize used __attribute__ ((aligned (8)));
};
struct anv_physical_device {
bool use_softpin;
bool always_use_bindless;
+ bool use_call_secondary;
/** True if we can access buffers using A64 messages */
bool has_a64_buffer_access;
/** True if we can use bindless access for samplers */
bool has_bindless_samplers;
+ /** True if we can read the GPU timestamp register
+ *
+ * When running in a virtual context, the timestamp register is unreadable
+ * on Gen12+.
+ */
+ bool has_reg_timestamp;
+
/** True if this device has implicit AUX
*
* If true, CCS is handled as an implicit attachment to the BO rather than
struct hash_table * nir_cache;
struct hash_table * cache;
+
+ bool external_sync;
};
struct nir_xfb_info;
void anv_pipeline_cache_init(struct anv_pipeline_cache *cache,
struct anv_device *device,
- bool cache_enabled);
+ bool cache_enabled,
+ bool external_sync);
void anv_pipeline_cache_finish(struct anv_pipeline_cache *cache);
struct anv_shader_bin *
uint64_t perf_metric; /* 0 if unset */
struct gen_aux_map_context *aux_map_ctx;
+
+ struct gen_debug_block_frame *debug_frame_desc;
};
static inline struct anv_instance *
void *location, struct anv_bo *bo, uint32_t offset);
struct anv_address anv_batch_address(struct anv_batch *batch, void *batch_location);
+/* Point @batch at the storage [map, map + size), whose anv_address is @addr.
+ *
+ * The write cursor (batch->next) is rewound to the start of the new
+ * storage, so any previously emitted contents are abandoned.
+ */
+static inline void
+anv_batch_set_storage(struct anv_batch *batch, struct anv_address addr,
+ void *map, size_t size)
+{
+ batch->start_addr = addr;
+ batch->next = batch->start = map;
+ batch->end = map + size;
+}
+
static inline VkResult
anv_batch_set_error(struct anv_batch *batch, VkResult error)
{
struct anv_descriptor_pool *pool;
struct anv_descriptor_set_layout *layout;
+
+ /* Amount of space occupied in the pool by this descriptor set. It can
+ * be larger than the size of the descriptor set.
+ */
uint32_t size;
/* State relative to anv_descriptor_pool::bo */
}
enum anv_cmd_dirty_bits {
- ANV_CMD_DIRTY_DYNAMIC_VIEWPORT = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
- ANV_CMD_DIRTY_DYNAMIC_SCISSOR = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
- ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
- ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
- ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
- ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
- ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
- ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
- ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
- ANV_CMD_DIRTY_PIPELINE = 1 << 9,
- ANV_CMD_DIRTY_INDEX_BUFFER = 1 << 10,
- ANV_CMD_DIRTY_RENDER_TARGETS = 1 << 11,
- ANV_CMD_DIRTY_XFB_ENABLE = 1 << 12,
- ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE = 1 << 13, /* VK_DYNAMIC_STATE_LINE_STIPPLE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_VIEWPORT = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
+ ANV_CMD_DIRTY_DYNAMIC_SCISSOR = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
+ ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
+ ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
+ ANV_CMD_DIRTY_PIPELINE = 1 << 9,
+ ANV_CMD_DIRTY_INDEX_BUFFER = 1 << 10,
+ ANV_CMD_DIRTY_RENDER_TARGETS = 1 << 11,
+ ANV_CMD_DIRTY_XFB_ENABLE = 1 << 12,
+ ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE = 1 << 13, /* VK_DYNAMIC_STATE_LINE_STIPPLE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_CULL_MODE = 1 << 14, /* VK_DYNAMIC_STATE_CULL_MODE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE = 1 << 15, /* VK_DYNAMIC_STATE_FRONT_FACE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY = 1 << 16, /* VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_VERTEX_INPUT_BINDING_STRIDE = 1 << 17, /* VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE = 1 << 18, /* VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE = 1 << 19, /* VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP = 1 << 20, /* VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE = 1 << 21, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE = 1 << 22, /* VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT */
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP = 1 << 23, /* VK_DYNAMIC_STATE_STENCIL_OP_EXT */
};
typedef uint32_t anv_cmd_dirty_mask_t;
-#define ANV_CMD_DIRTY_DYNAMIC_ALL \
- (ANV_CMD_DIRTY_DYNAMIC_VIEWPORT | \
- ANV_CMD_DIRTY_DYNAMIC_SCISSOR | \
- ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH | \
- ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS | \
- ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS | \
- ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS | \
- ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | \
- ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | \
- ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE | \
- ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE)
+#define ANV_CMD_DIRTY_DYNAMIC_ALL \
+ (ANV_CMD_DIRTY_DYNAMIC_VIEWPORT | \
+ ANV_CMD_DIRTY_DYNAMIC_SCISSOR | \
+ ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH | \
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS | \
+ ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS | \
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS | \
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | \
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | \
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE | \
+ ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE | \
+ ANV_CMD_DIRTY_DYNAMIC_CULL_MODE | \
+ ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE | \
+ ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY | \
+ ANV_CMD_DIRTY_DYNAMIC_VERTEX_INPUT_BINDING_STRIDE | \
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | \
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | \
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | \
+ ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE | \
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | \
+ ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)
static inline enum anv_cmd_dirty_bits
anv_cmd_dirty_bit_for_vk_dynamic_state(VkDynamicState vk_state)
{
switch (vk_state) {
case VK_DYNAMIC_STATE_VIEWPORT:
+ case VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT:
return ANV_CMD_DIRTY_DYNAMIC_VIEWPORT;
case VK_DYNAMIC_STATE_SCISSOR:
+ case VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT:
return ANV_CMD_DIRTY_DYNAMIC_SCISSOR;
case VK_DYNAMIC_STATE_LINE_WIDTH:
return ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH;
return ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE;
case VK_DYNAMIC_STATE_LINE_STIPPLE_EXT:
return ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE;
+ case VK_DYNAMIC_STATE_CULL_MODE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_CULL_MODE;
+ case VK_DYNAMIC_STATE_FRONT_FACE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE;
+ case VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY;
+ case VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_VERTEX_INPUT_BINDING_STRIDE;
+ case VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE;
+ case VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE;
+ case VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP;
+ case VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE;
+ case VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE;
+ case VK_DYNAMIC_STATE_STENCIL_OP_EXT:
+ return ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP;
default:
assert(!"Unsupported dynamic state");
return 0;
struct anv_vertex_binding {
struct anv_buffer * buffer;
VkDeviceSize offset;
+ /* NOTE(review): presumably set when the binding stride/size come from
+ * dynamic state (VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT /
+ * vkCmdBindVertexBuffers2EXT) rather than the pipeline — the matching
+ * dyn_vbo_stride/dyn_vbo_size flags appear later in this patch; confirm
+ * against the command-buffer implementation.
+ */
+ VkDeviceSize stride;
+ VkDeviceSize size;
};
struct anv_xfb_binding {
uint32_t back;
} stencil_reference;
+ struct {
+ struct {
+ VkStencilOp fail_op;
+ VkStencilOp pass_op;
+ VkStencilOp depth_fail_op;
+ VkCompareOp compare_op;
+ } front;
+ struct {
+ VkStencilOp fail_op;
+ VkStencilOp pass_op;
+ VkStencilOp depth_fail_op;
+ VkCompareOp compare_op;
+ } back;
+ } stencil_op;
+
struct {
uint32_t factor;
uint16_t pattern;
} line_stipple;
+
+ VkCullModeFlags cull_mode;
+ VkFrontFace front_face;
+ VkPrimitiveTopology primitive_topology;
+ bool depth_test_enable;
+ bool depth_write_enable;
+ VkCompareOp depth_compare_op;
+ bool depth_bounds_test_enable;
+ bool stencil_test_enable;
+ bool dyn_vbo_stride;
+ bool dyn_vbo_size;
};
extern const struct anv_dynamic_state default_dynamic_state;
struct anv_dynamic_state dynamic;
+ uint32_t primitive_topology;
+
struct {
struct anv_buffer *index_buffer;
uint32_t index_type; /**< 3DSTATE_INDEX_BUFFER.IndexFormat */
struct {
uint32_t sf[7];
uint32_t depth_stencil_state[3];
+ uint32_t clip[4];
} gen7;
struct {
}
VkResult
-anv_pipeline_init(struct anv_graphics_pipeline *pipeline, struct anv_device *device,
- struct anv_pipeline_cache *cache,
- const VkGraphicsPipelineCreateInfo *pCreateInfo,
- const VkAllocationCallbacks *alloc);
+anv_pipeline_init(struct anv_pipeline *pipeline,
+ struct anv_device *device,
+ enum anv_pipeline_type type,
+ VkPipelineCreateFlags flags,
+ const VkAllocationCallbacks *pAllocator);
+
+void
+anv_pipeline_finish(struct anv_pipeline *pipeline,
+ struct anv_device *device,
+ const VkAllocationCallbacks *pAllocator);
+
+VkResult
+anv_graphics_pipeline_init(struct anv_graphics_pipeline *pipeline, struct anv_device *device,
+ struct anv_pipeline_cache *cache,
+ const VkGraphicsPipelineCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *alloc);
VkResult
anv_pipeline_compile_cs(struct anv_compute_pipeline *pipeline,
const char *entrypoint,
const VkSpecializationInfo *spec_info);
-uint32_t
-anv_cs_workgroup_size(const struct anv_compute_pipeline *pipeline);
+/** Compute-shader dispatch parameters.
+ *
+ * Bundles the values previously returned by the separate
+ * anv_cs_workgroup_size() and anv_cs_threads() helpers (removed in this
+ * hunk) so callers fetch all of them with one anv_cs_parameters() call.
+ *
+ * NOTE(review): the precise meaning of group_size vs. threads (total
+ * invocations per workgroup vs. HW threads at the chosen simd_size) is not
+ * visible in this hunk — confirm in the anv_cs_parameters() definition.
+ */
+struct anv_cs_parameters {
+ uint32_t group_size;
+ uint32_t simd_size;
+ uint32_t threads;
+};
-uint32_t
-anv_cs_threads(const struct anv_compute_pipeline *pipeline);
+struct anv_cs_parameters
+anv_cs_parameters(const struct anv_compute_pipeline *pipeline);
struct anv_format_plane {
enum isl_format isl_format:16;
const struct anv_instance_extension_table *instance,
const struct anv_device_extension_table *device);
+void *anv_resolve_device_entrypoint(const struct gen_device_info *devinfo,
+ uint32_t index);
void *anv_lookup_entrypoint(const struct gen_device_info *devinfo,
const char *name);