static_assert(sizeof(struct rgp_sqtt_marker_event_with_dims) == 24,
"rgp_sqtt_marker_event_with_dims doesn't match RGP spec");
+/**
+ * "Barrier Start" RGP SQTT instrumentation marker (Table 5)
+ *
+ * Two dwords. dword01 identifies the marker and command buffer; dword02
+ * carries the reason the barrier was issued (enum rgp_barrier_reason).
+ */
+struct rgp_sqtt_marker_barrier_start {
+ union {
+ struct {
+ uint32_t identifier : 4; /* RGP_SQTT_MARKER_IDENTIFIER_BARRIER_START */
+ uint32_t ext_dwords : 3; /* number of extra dwords following the marker */
+ uint32_t cb_id : 20; /* command buffer id */
+ uint32_t reserved : 5;
+ };
+ uint32_t dword01; /* raw access to the first dword */
+ };
+ union {
+ struct {
+ uint32_t driver_reason : 31; /* rgp_barrier_reason value */
+ uint32_t internal : 1; /* set for driver-internal barriers */
+ };
+ uint32_t dword02; /* raw access to the second dword */
+ };
+};
+
+/* The RGP spec mandates an exact 2-dword (8-byte) layout. */
+static_assert(sizeof(struct rgp_sqtt_marker_barrier_start) == 8,
+	      "rgp_sqtt_marker_barrier_start doesn't match RGP spec");
+
+/**
+ * "Barrier End" RGP SQTT instrumentation marker (Table 6)
+ *
+ * Two dwords. dword01 identifies the marker and records pipeline stalls;
+ * dword02 records which caches were flushed/invalidated by the barrier.
+ */
+struct rgp_sqtt_marker_barrier_end {
+ union {
+ struct {
+ uint32_t identifier : 4; /* RGP_SQTT_MARKER_IDENTIFIER_BARRIER_END */
+ uint32_t ext_dwords : 3; /* number of extra dwords following the marker */
+ uint32_t cb_id : 20; /* command buffer id */
+ uint32_t wait_on_eop_ts : 1;
+ uint32_t vs_partial_flush : 1;
+ uint32_t ps_partial_flush : 1;
+ uint32_t cs_partial_flush : 1;
+ uint32_t pfp_sync_me : 1;
+ };
+ uint32_t dword01; /* raw access to the first dword */
+ };
+ union {
+ struct {
+ uint32_t sync_cp_dma : 1;
+ uint32_t inval_tcp : 1; /* fixed: was "inval_ccp"; the RGP spec names this
+                          * cache TCP (texture cache per-pipe), as in PAL. */
+ uint32_t inval_sqI : 1;
+ uint32_t inval_sqK : 1;
+ uint32_t flush_tcc : 1;
+ uint32_t inval_tcc : 1;
+ uint32_t flush_cb : 1;
+ uint32_t inval_cb : 1;
+ uint32_t flush_db : 1;
+ uint32_t inval_db : 1;
+ uint32_t num_layout_transitions : 16;
+ uint32_t inval_gl1 : 1;
+ uint32_t reserved : 5;
+ };
+ uint32_t dword02; /* raw access to the second dword */
+ };
+};
+
+/* The RGP spec mandates an exact 2-dword (8-byte) layout. */
+static_assert(sizeof(struct rgp_sqtt_marker_barrier_end) == 8,
+	      "rgp_sqtt_marker_barrier_end doesn't match RGP spec");
+
static void
radv_write_begin_general_api_marker(struct radv_cmd_buffer *cmd_buffer,
enum rgp_sqtt_marker_general_api_type api_type)
cmd_buffer->state.current_event_type = EventInternalUnknown;
}
+/* Emit a "Barrier Start" SQTT user-data marker recording why the barrier
+ * was issued. No-op unless SQTT capture is active (thread_trace_bo set).
+ */
+void
+radv_describe_barrier_start(struct radv_cmd_buffer *cmd_buffer,
+			    enum rgp_barrier_reason reason)
+{
+ struct rgp_sqtt_marker_barrier_start marker = {};
+ struct radeon_cmdbuf *cs = cmd_buffer->cs;
+
+ /* Capture disabled is the common case, hence likely(). */
+ if (likely(!cmd_buffer->device->thread_trace_bo))
+  return;
+
+ marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_BARRIER_START;
+ marker.cb_id = 0;
+ /* dword02 aliases the driver_reason/internal bitfields. */
+ marker.dword02 = reason;
+
+ /* Marker payload is emitted as dwords. */
+ radv_emit_thread_trace_userdata(cs, &marker, sizeof(marker) / 4);
+}
+
+/* Emit a "Barrier End" SQTT user-data marker closing the barrier region
+ * opened by radv_describe_barrier_start(). No-op unless SQTT capture is
+ * active (thread_trace_bo set).
+ */
+void
+radv_describe_barrier_end(struct radv_cmd_buffer *cmd_buffer)
+{
+ struct rgp_sqtt_marker_barrier_end marker = {};
+ struct radeon_cmdbuf *cs = cmd_buffer->cs;
+
+ /* Capture disabled is the common case, hence likely(). */
+ if (likely(!cmd_buffer->device->thread_trace_bo))
+  return;
+
+ marker.identifier = RGP_SQTT_MARKER_IDENTIFIER_BARRIER_END;
+ marker.cb_id = 0;
+
+ /* TODO: fill pipeline stalls, cache flushes, etc */
+
+ /* Marker payload is emitted as dwords. */
+ radv_emit_thread_trace_userdata(cs, &marker, sizeof(marker) / 4);
+}
+
#define EVENT_MARKER(cmd_name, args...) \
RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer); \
radv_write_begin_general_api_marker(cmd_buffer, ApiCmd##cmd_name); \
}
struct radv_barrier_info {
+ enum rgp_barrier_reason reason;
uint32_t eventCount;
const VkEvent *pEvents;
VkPipelineStageFlags srcStageMask;
enum radv_cmd_flush_bits src_flush_bits = 0;
enum radv_cmd_flush_bits dst_flush_bits = 0;
+ radv_describe_barrier_start(cmd_buffer, info->reason);
+
for (unsigned i = 0; i < info->eventCount; ++i) {
RADV_FROM_HANDLE(radv_event, event, info->pEvents[i]);
uint64_t va = radv_buffer_get_va(event->bo);
si_cp_dma_wait_for_idle(cmd_buffer);
cmd_buffer->state.flush_bits |= dst_flush_bits;
+
+ radv_describe_barrier_end(cmd_buffer);
}
void radv_CmdPipelineBarrier(
RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
struct radv_barrier_info info;
+ info.reason = RGP_BARRIER_EXTERNAL_CMD_PIPELINE_BARRIER;
info.eventCount = 0;
info.pEvents = NULL;
info.srcStageMask = srcStageMask;
RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
struct radv_barrier_info info;
+ info.reason = RGP_BARRIER_EXTERNAL_CMD_WAIT_EVENTS;
info.eventCount = eventCount;
info.pEvents = pEvents;
info.srcStageMask = 0;
const struct radv_thread_trace *trace);
/* radv_sqtt_layer_.c */
+/**
+ * Value for the reason field of an RGP barrier start marker originating from
+ * the Vulkan client (does not include PAL-defined values). (Table 15)
+ */
+enum rgp_barrier_reason {
+ RGP_BARRIER_UNKNOWN_REASON = 0xFFFFFFFF,
+
+ /* External app-generated barrier reasons, i.e. API synchronization
+  * commands. Range of valid values: [0x00000001 ... 0x7FFFFFFF].
+  */
+ RGP_BARRIER_EXTERNAL_CMD_PIPELINE_BARRIER = 0x00000001,
+ RGP_BARRIER_EXTERNAL_RENDER_PASS_SYNC = 0x00000002,
+ RGP_BARRIER_EXTERNAL_CMD_WAIT_EVENTS = 0x00000003,
+
+ /* Internal barrier reasons, i.e. implicit synchronization inserted by
+  * the Vulkan driver. Range of valid values: [0xC0000000 ... 0xFFFFFFFE].
+  */
+ RGP_BARRIER_INTERNAL_BASE = 0xC0000000,
+ RGP_BARRIER_INTERNAL_PRE_RESET_QUERY_POOL_SYNC = RGP_BARRIER_INTERNAL_BASE + 0,
+ RGP_BARRIER_INTERNAL_POST_RESET_QUERY_POOL_SYNC = RGP_BARRIER_INTERNAL_BASE + 1,
+ RGP_BARRIER_INTERNAL_GPU_EVENT_RECYCLE_STALL = RGP_BARRIER_INTERNAL_BASE + 2,
+ RGP_BARRIER_INTERNAL_PRE_COPY_QUERY_POOL_RESULTS_SYNC = RGP_BARRIER_INTERNAL_BASE + 3
+};
+
void radv_describe_begin_cmd_buffer(struct radv_cmd_buffer *cmd_buffer);
void radv_describe_end_cmd_buffer(struct radv_cmd_buffer *cmd_buffer);
void radv_describe_draw(struct radv_cmd_buffer *cmd_buffer);
void radv_describe_begin_render_pass_clear(struct radv_cmd_buffer *cmd_buffer,
VkImageAspectFlagBits aspects);
void radv_describe_end_render_pass_clear(struct radv_cmd_buffer *cmd_buffer);
+void radv_describe_barrier_start(struct radv_cmd_buffer *cmd_buffer,
+ enum rgp_barrier_reason reason);
+void radv_describe_barrier_end(struct radv_cmd_buffer *cmd_buffer);
struct radeon_winsys_sem;