return VK_SUCCESS;
}
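+/* If a prebuilt gfx initial-state IB exists, reference it from this command
+ * buffer and chain to it; otherwise fall back to emitting the config packets
+ * inline.
+ */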
+static void emit_gfx_buffer_state(struct radv_cmd_buffer *cmd_buffer)
+{
+ struct radv_device *device = cmd_buffer->device;
+ if (device->gfx_init) {
+ uint64_t va = device->ws->buffer_get_va(device->gfx_init);
+ device->ws->cs_add_buffer(cmd_buffer->cs, device->gfx_init, 8);
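+ /* Chain to the init-state IB: address low dword, upper address bits,
+  * then the IB size in dwords.
+  */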
+ radeon_emit(cmd_buffer->cs, PKT3(PKT3_INDIRECT_BUFFER_CIK, 2, 0));
+ radeon_emit(cmd_buffer->cs, va);
+ radeon_emit(cmd_buffer->cs, (va >> 32) & 0xffff);
+ radeon_emit(cmd_buffer->cs, device->gfx_init_size_dw & 0xffff);
+ } else
+ si_init_config(cmd_buffer);
+}
+
VkResult radv_BeginCommandBuffer(
VkCommandBuffer commandBuffer,
const VkCommandBufferBeginInfo *pBeginInfo)
RADV_CMD_FLAG_INV_SMEM_L1 |
RADV_CMD_FLUSH_AND_INV_FRAMEBUFFER |
RADV_CMD_FLAG_INV_GLOBAL_L2;
- si_init_config(cmd_buffer);
+ emit_gfx_buffer_state(cmd_buffer);
radv_set_db_count_control(cmd_buffer);
si_emit_cache_flush(cmd_buffer);
break;
goto fail;
}
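+ /* On CIK and later, record the common gfx state once per device so
+  * command buffers can chain to it instead of re-emitting it.
+  */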
+ if (device->physical_device->rad_info.chip_class >= CIK)
+ cik_create_gfx_config(device);
+
*pDevice = radv_device_to_handle(device);
return VK_SUCCESS;
if (device->trace_bo)
device->ws->buffer_destroy(device->trace_bo);
+ if (device->gfx_init)
+ device->ws->buffer_destroy(device->gfx_init);
+
for (unsigned i = 0; i < RADV_MAX_QUEUE_FAMILIES; i++) {
for (unsigned q = 0; q < device->queue_count[i]; q++)
radv_queue_finish(&device->queues[i][q]);
if (device->trace_bo)
device->ws->buffer_destroy(device->trace_bo);
+ if (device->gfx_init)
+ device->ws->buffer_destroy(device->gfx_init);
+
for (unsigned i = 0; i < RADV_MAX_QUEUE_FAMILIES; i++) {
for (unsigned q = 0; q < device->queue_count[i]; q++)
radv_queue_finish(&device->queues[i][q]);
float sample_locations_8x[8][2];
float sample_locations_16x[16][2];
+ /* Prebuilt gfx initial-state IB (CIK and later). */
+ uint32_t gfx_init_size_dw;
+ struct radeon_winsys_bo *gfx_init;
+
struct radeon_winsys_bo *trace_bo;
uint32_t *trace_id_ptr;
void si_init_compute(struct radv_cmd_buffer *cmd_buffer);
void si_init_config(struct radv_cmd_buffer *cmd_buffer);
+
+void cik_create_gfx_config(struct radv_device *device);
+
void si_write_viewport(struct radeon_winsys_cs *cs, int first_vp,
int count, const VkViewport *viewports);
void si_write_scissors(struct radeon_winsys_cs *cs, int first,
void si_init_config(struct radv_cmd_buffer *cmd_buffer)
{
struct radv_physical_device *physical_device = cmd_buffer->device->physical_device;
+
si_emit_config(physical_device, cmd_buffer->cs);
}
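+/* Record the common gfx config into a temporary CS and keep a copy in a
+ * CPU-mapped GTT buffer; on CIK+ command buffers execute it via
+ * INDIRECT_BUFFER instead of emitting the packets inline.
+ */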
+void
+cik_create_gfx_config(struct radv_device *device)
+{
+ struct radeon_winsys_cs *cs = device->ws->cs_create(device->ws, RING_GFX);
+ if (!cs)
+ return;
+
+ si_emit_config(device->physical_device, cs);
+
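+ /* Copy the recorded packets into a CPU-accessible GTT buffer that the
+  * GPU can execute as an IB.
+  */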
+ device->gfx_init = device->ws->buffer_create(device->ws,
+ cs->cdw * 4, 4096,
+ RADEON_DOMAIN_GTT,
+ RADEON_FLAG_CPU_ACCESS);
+ if (!device->gfx_init)
+ goto fail;
+
+ void *map = device->ws->buffer_map(device->gfx_init);
+ if (!map) {
+ device->ws->buffer_destroy(device->gfx_init);
+ device->gfx_init = NULL;
+ goto fail;
+ }
+ memcpy(map, cs->buf, cs->cdw * 4);
+
+ device->ws->buffer_unmap(device->gfx_init);
+ device->gfx_init_size_dw = cs->cdw;
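+ /* The temporary CS is no longer needed once its contents are copied,
+  * or if any step above failed.
+  */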
+fail:
+ device->ws->cs_destroy(cs);
+}
+
static void
get_viewport_xform(const VkViewport *viewport,
float scale[3], float translate[3])