},
.cull_mode = 0u,
.front_face = 0u,
+ .primitive_topology = 0u,
};
static void
dest->sample_location.count = src->sample_location.count;
if (copy_mask & RADV_DYNAMIC_VIEWPORT) {
- dest->viewport.count = src->viewport.count;
+ if (dest->viewport.count != src->viewport.count) {
+ dest->viewport.count = src->viewport.count;
+ dest_mask |= RADV_DYNAMIC_VIEWPORT;
+ }
+
if (memcmp(&dest->viewport.viewports, &src->viewport.viewports,
src->viewport.count * sizeof(VkViewport))) {
typed_memcpy(dest->viewport.viewports,
}
if (copy_mask & RADV_DYNAMIC_SCISSOR) {
- dest->scissor.count = src->scissor.count;
+ if (dest->scissor.count != src->scissor.count) {
+ dest->scissor.count = src->scissor.count;
+ dest_mask |= RADV_DYNAMIC_SCISSOR;
+ }
+
if (memcmp(&dest->scissor.scissors, &src->scissor.scissors,
src->scissor.count * sizeof(VkRect2D))) {
typed_memcpy(dest->scissor.scissors,
}
}
+ if (copy_mask & RADV_DYNAMIC_PRIMITIVE_TOPOLOGY) {
+ if (dest->primitive_topology != src->primitive_topology) {
+ dest->primitive_topology = src->primitive_topology;
+ dest_mask |= RADV_DYNAMIC_PRIMITIVE_TOPOLOGY;
+ }
+ }
+
+ if (copy_mask & RADV_DYNAMIC_DEPTH_TEST_ENABLE) {
+ if (dest->depth_test_enable != src->depth_test_enable) {
+ dest->depth_test_enable = src->depth_test_enable;
+ dest_mask |= RADV_DYNAMIC_DEPTH_TEST_ENABLE;
+ }
+ }
+
+ if (copy_mask & RADV_DYNAMIC_DEPTH_WRITE_ENABLE) {
+ if (dest->depth_write_enable != src->depth_write_enable) {
+ dest->depth_write_enable = src->depth_write_enable;
+ dest_mask |= RADV_DYNAMIC_DEPTH_WRITE_ENABLE;
+ }
+ }
+
+ if (copy_mask & RADV_DYNAMIC_DEPTH_COMPARE_OP) {
+ if (dest->depth_compare_op != src->depth_compare_op) {
+ dest->depth_compare_op = src->depth_compare_op;
+ dest_mask |= RADV_DYNAMIC_DEPTH_COMPARE_OP;
+ }
+ }
+
+ if (copy_mask & RADV_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE) {
+ if (dest->depth_bounds_test_enable != src->depth_bounds_test_enable) {
+ dest->depth_bounds_test_enable = src->depth_bounds_test_enable;
+ dest_mask |= RADV_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE;
+ }
+ }
+
+ if (copy_mask & RADV_DYNAMIC_STENCIL_TEST_ENABLE) {
+ if (dest->stencil_test_enable != src->stencil_test_enable) {
+ dest->stencil_test_enable = src->stencil_test_enable;
+ dest_mask |= RADV_DYNAMIC_STENCIL_TEST_ENABLE;
+ }
+ }
+
+ if (copy_mask & RADV_DYNAMIC_STENCIL_OP) {
+ if (memcmp(&dest->stencil_op, &src->stencil_op,
+ sizeof(src->stencil_op))) {
+ dest->stencil_op = src->stencil_op;
+ dest_mask |= RADV_DYNAMIC_STENCIL_OP;
+ }
+ }
+
cmd_buffer->state.dirty |= dest_mask;
}
}
}
+/* Free all resources owned by a command buffer and the command buffer
+ * itself.  Unlike the old radv_cmd_buffer_destroy, this tolerates a
+ * partially-constructed command buffer (NULL upload_bo / cs), so it can
+ * also be used on radv_create_cmd_buffer error paths.
+ */
+static void
+radv_destroy_cmd_buffer(struct radv_cmd_buffer *cmd_buffer)
+{
+ list_del(&cmd_buffer->pool_link);
+
+ /* Release every upload BO chained off this command buffer. */
+ list_for_each_entry_safe(struct radv_cmd_buffer_upload, up,
+ &cmd_buffer->upload.list, list) {
+ cmd_buffer->device->ws->buffer_destroy(up->upload_bo);
+ list_del(&up->list);
+ free(up);
+ }
+
+ /* These may be NULL when destruction happens mid-creation. */
+ if (cmd_buffer->upload.upload_bo)
+ cmd_buffer->device->ws->buffer_destroy(cmd_buffer->upload.upload_bo);
+
+ if (cmd_buffer->cs)
+ cmd_buffer->device->ws->cs_destroy(cmd_buffer->cs);
+
+ /* Free the CPU-side push descriptor storage for each bind point. */
+ for (unsigned i = 0; i < MAX_BIND_POINTS; i++)
+ free(cmd_buffer->descriptors[i].push_set.set.mapped_ptr);
+
+ vk_object_base_finish(&cmd_buffer->base);
+ vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
+}
+
static VkResult radv_create_cmd_buffer(
struct radv_device * device,
struct radv_cmd_pool * pool,
cmd_buffer->cs = device->ws->cs_create(device->ws, ring);
if (!cmd_buffer->cs) {
- vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
+ radv_destroy_cmd_buffer(cmd_buffer);
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
}
return VK_SUCCESS;
}
-static void
-radv_cmd_buffer_destroy(struct radv_cmd_buffer *cmd_buffer)
-{
- list_del(&cmd_buffer->pool_link);
-
- list_for_each_entry_safe(struct radv_cmd_buffer_upload, up,
- &cmd_buffer->upload.list, list) {
- cmd_buffer->device->ws->buffer_destroy(up->upload_bo);
- list_del(&up->list);
- free(up);
- }
-
- if (cmd_buffer->upload.upload_bo)
- cmd_buffer->device->ws->buffer_destroy(cmd_buffer->upload.upload_bo);
- cmd_buffer->device->ws->cs_destroy(cmd_buffer->cs);
-
- for (unsigned i = 0; i < MAX_BIND_POINTS; i++)
- free(cmd_buffer->descriptors[i].push_set.set.mapped_ptr);
-
- vk_object_base_finish(&cmd_buffer->base);
-
- vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
-}
-
static VkResult
radv_reset_cmd_buffer(struct radv_cmd_buffer *cmd_buffer)
{
RADEON_DOMAIN_GTT,
RADEON_FLAG_CPU_ACCESS|
RADEON_FLAG_NO_INTERPROCESS_SHARING |
- RADEON_FLAG_32BIT,
+ RADEON_FLAG_32BIT |
+ RADEON_FLAG_GTT_WC,
RADV_BO_PRIORITY_UPLOAD_BUFFER);
if (!bo) {
cmd_buffer->state.dirty |= RADV_CMD_DIRTY_DYNAMIC_CULL_MODE |
RADV_CMD_DIRTY_DYNAMIC_FRONT_FACE;
+ if (!cmd_buffer->state.emitted_pipeline)
+ cmd_buffer->state.dirty |= RADV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY;
+
+ if (!cmd_buffer->state.emitted_pipeline ||
+ cmd_buffer->state.emitted_pipeline->graphics.db_depth_control !=
+ pipeline->graphics.db_depth_control)
+ cmd_buffer->state.dirty |= RADV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
+ RADV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_STENCIL_OP;
+
+ if (!cmd_buffer->state.emitted_pipeline)
+ cmd_buffer->state.dirty |= RADV_CMD_DIRTY_DYNAMIC_STENCIL_OP;
+
radeon_emit_array(cmd_buffer->cs, pipeline->cs.buf, pipeline->cs.cdw);
if (!cmd_buffer->state.emitted_pipeline ||
radv_emit_line_stipple(struct radv_cmd_buffer *cmd_buffer)
{
struct radv_dynamic_state *d = &cmd_buffer->state.dynamic;
- struct radv_pipeline *pipeline = cmd_buffer->state.pipeline;
uint32_t auto_reset_cntl = 1;
- if (pipeline->graphics.topology == V_008958_DI_PT_LINESTRIP)
+ if (d->primitive_topology == V_008958_DI_PT_LINESTRIP)
auto_reset_cntl = 2;
radeon_set_context_reg(cmd_buffer->cs, R_028A0C_PA_SC_LINE_STIPPLE,
pa_su_sc_mode_cntl);
}
+/* Emit the dynamically-set primitive topology to VGT_PRIMITIVE_TYPE.
+ * On GFX7+ the register is in uconfig space and written with the
+ * indexed variant; older chips program it as a config register.
+ */
+static void
+radv_emit_primitive_topology(struct radv_cmd_buffer *cmd_buffer)
+{
+ struct radv_dynamic_state *d = &cmd_buffer->state.dynamic;
+
+ if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX7) {
+ radeon_set_uconfig_reg_idx(cmd_buffer->device->physical_device,
+ cmd_buffer->cs,
+ R_030908_VGT_PRIMITIVE_TYPE, 1,
+ d->primitive_topology);
+ } else {
+ radeon_set_config_reg(cmd_buffer->cs,
+ R_008958_VGT_PRIMITIVE_TYPE,
+ d->primitive_topology);
+ }
+}
+
+/* Re-emit DB_DEPTH_CONTROL, starting from the pipeline's baked-in value
+ * and overriding only the fields that are dynamic per `states`.  Each
+ * C_028800_* constant is the complement mask that clears a field before
+ * the new S_028800_* value is OR'd in.
+ */
+static void
+radv_emit_depth_control(struct radv_cmd_buffer *cmd_buffer, uint32_t states)
+{
+ unsigned db_depth_control = cmd_buffer->state.pipeline->graphics.db_depth_control;
+ struct radv_dynamic_state *d = &cmd_buffer->state.dynamic;
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE) {
+ db_depth_control &= C_028800_Z_ENABLE;
+ db_depth_control |= S_028800_Z_ENABLE(d->depth_test_enable ? 1 : 0);
+ }
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE) {
+ db_depth_control &= C_028800_Z_WRITE_ENABLE;
+ db_depth_control |= S_028800_Z_WRITE_ENABLE(d->depth_write_enable ? 1 : 0);
+ }
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP) {
+ db_depth_control &= C_028800_ZFUNC;
+ db_depth_control |= S_028800_ZFUNC(d->depth_compare_op);
+ }
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE) {
+ db_depth_control &= C_028800_DEPTH_BOUNDS_ENABLE;
+ db_depth_control |= S_028800_DEPTH_BOUNDS_ENABLE(d->depth_bounds_test_enable ? 1 : 0);
+ }
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE) {
+ db_depth_control &= C_028800_STENCIL_ENABLE;
+ db_depth_control |= S_028800_STENCIL_ENABLE(d->stencil_test_enable ? 1 : 0);
+
+ /* Back-face stencil is enabled/disabled together with front. */
+ db_depth_control &= C_028800_BACKFACE_ENABLE;
+ db_depth_control |= S_028800_BACKFACE_ENABLE(d->stencil_test_enable ? 1 : 0);
+ }
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_STENCIL_OP) {
+ /* Only the compare funcs live in this register; the fail/pass/
+ * depth-fail ops are emitted to DB_STENCIL_CONTROL by
+ * radv_emit_stencil_control().
+ */
+ db_depth_control &= C_028800_STENCILFUNC;
+ db_depth_control |= S_028800_STENCILFUNC(d->stencil_op.front.compare_op);
+
+ db_depth_control &= C_028800_STENCILFUNC_BF;
+ db_depth_control |= S_028800_STENCILFUNC_BF(d->stencil_op.back.compare_op);
+ }
+
+ radeon_set_context_reg(cmd_buffer->cs, R_028800_DB_DEPTH_CONTROL,
+ db_depth_control);
+}
+
+/* Emit DB_STENCIL_CONTROL from the dynamic per-face stencil fail/pass/
+ * depth-fail ops (the compare funcs are handled in DB_DEPTH_CONTROL).
+ * si_translate_stencil_op converts the Vulkan op to the hw encoding.
+ */
+static void
+radv_emit_stencil_control(struct radv_cmd_buffer *cmd_buffer)
+{
+ struct radv_dynamic_state *d = &cmd_buffer->state.dynamic;
+
+ radeon_set_context_reg(cmd_buffer->cs, R_02842C_DB_STENCIL_CONTROL,
+ S_02842C_STENCILFAIL(si_translate_stencil_op(d->stencil_op.front.fail_op)) |
+ S_02842C_STENCILZPASS(si_translate_stencil_op(d->stencil_op.front.pass_op)) |
+ S_02842C_STENCILZFAIL(si_translate_stencil_op(d->stencil_op.front.depth_fail_op)) |
+ S_02842C_STENCILFAIL_BF(si_translate_stencil_op(d->stencil_op.back.fail_op)) |
+ S_02842C_STENCILZPASS_BF(si_translate_stencil_op(d->stencil_op.back.pass_op)) |
+ S_02842C_STENCILZFAIL_BF(si_translate_stencil_op(d->stencil_op.back.depth_fail_op)));
+}
+
static void
radv_emit_fb_color_state(struct radv_cmd_buffer *cmd_buffer,
int index,
RADV_CMD_DIRTY_DYNAMIC_FRONT_FACE))
radv_emit_culling(cmd_buffer, states);
+ if (states & RADV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)
+ radv_emit_primitive_topology(cmd_buffer);
+
+ if (states & (RADV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
+ RADV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
+ RADV_CMD_DIRTY_DYNAMIC_STENCIL_OP))
+ radv_emit_depth_control(cmd_buffer, states);
+
+ if (states & RADV_CMD_DIRTY_DYNAMIC_STENCIL_OP)
+ radv_emit_stencil_control(cmd_buffer);
+
cmd_buffer->state.dirty &= ~states;
}
uint32_t *desc = &((uint32_t *)vb_ptr)[i * 4];
uint32_t offset;
struct radv_buffer *buffer = cmd_buffer->vertex_bindings[i].buffer;
- uint32_t stride = cmd_buffer->state.pipeline->binding_stride[i];
unsigned num_records;
+ unsigned stride;
if (!buffer)
continue;
offset = cmd_buffer->vertex_bindings[i].offset;
va += offset + buffer->offset;
- num_records = buffer->size - offset;
+ if (cmd_buffer->vertex_bindings[i].size) {
+ num_records = cmd_buffer->vertex_bindings[i].size;
+ } else {
+ num_records = buffer->size - offset;
+ }
+
+ if (cmd_buffer->state.pipeline->graphics.uses_dynamic_stride) {
+ stride = cmd_buffer->vertex_bindings[i].stride;
+ } else {
+ stride = cmd_buffer->state.pipeline->binding_stride[i];
+ }
+
if (cmd_buffer->device->physical_device->rad_info.chip_class != GFX8 && stride)
num_records /= stride;
- desc[0] = va;
- desc[1] = S_008F04_BASE_ADDRESS_HI(va >> 32) | S_008F04_STRIDE(stride);
- desc[2] = num_records;
- desc[3] = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
- S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
- S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
- S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W);
+ uint32_t rsrc_word3 = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
+ S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
+ S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
+ S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W);
if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX10) {
/* OOB_SELECT chooses the out-of-bounds check:
*/
int oob_select = stride ? V_008F0C_OOB_SELECT_STRUCTURED : V_008F0C_OOB_SELECT_RAW;
- desc[3] |= S_008F0C_FORMAT(V_008F0C_IMG_FORMAT_32_UINT) |
- S_008F0C_OOB_SELECT(oob_select) |
- S_008F0C_RESOURCE_LEVEL(1);
+ rsrc_word3 |= S_008F0C_FORMAT(V_008F0C_IMG_FORMAT_32_UINT) |
+ S_008F0C_OOB_SELECT(oob_select) |
+ S_008F0C_RESOURCE_LEVEL(1);
} else {
- desc[3] |= S_008F0C_NUM_FORMAT(V_008F0C_BUF_NUM_FORMAT_UINT) |
- S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);
+ rsrc_word3 |= S_008F0C_NUM_FORMAT(V_008F0C_BUF_NUM_FORMAT_UINT) |
+ S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);
}
+
+ desc[0] = va;
+ desc[1] = S_008F04_BASE_ADDRESS_HI(va >> 32) | S_008F04_STRIDE(stride);
+ desc[2] = num_records;
+ desc[3] = rsrc_word3;
}
va = radv_buffer_get_va(cmd_buffer->upload.upload_bo);
if (cmd_buffer->device->physical_device->use_ngg_streamout)
size = buffer->size - sb[i].offset;
- desc[0] = va;
- desc[1] = S_008F04_BASE_ADDRESS_HI(va >> 32);
- desc[2] = size;
- desc[3] = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
- S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
- S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
- S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W);
+ uint32_t rsrc_word3 = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
+ S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
+ S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
+ S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W);
if (cmd_buffer->device->physical_device->rad_info.chip_class >= GFX10) {
- desc[3] |= S_008F0C_FORMAT(V_008F0C_IMG_FORMAT_32_FLOAT) |
- S_008F0C_OOB_SELECT(V_008F0C_OOB_SELECT_RAW) |
- S_008F0C_RESOURCE_LEVEL(1);
+ rsrc_word3 |= S_008F0C_FORMAT(V_008F0C_IMG_FORMAT_32_FLOAT) |
+ S_008F0C_OOB_SELECT(V_008F0C_OOB_SELECT_RAW) |
+ S_008F0C_RESOURCE_LEVEL(1);
} else {
- desc[3] |= S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);
+ rsrc_word3 |= S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);
}
+
+ desc[0] = va;
+ desc[1] = S_008F04_BASE_ADDRESS_HI(va >> 32);
+ desc[2] = size;
+ desc[3] = rsrc_word3;
}
va = radv_buffer_get_va(cmd_buffer->upload.upload_bo);
{
struct radeon_info *info = &cmd_buffer->device->physical_device->rad_info;
struct radv_cmd_state *state = &cmd_buffer->state;
+ unsigned topology = state->dynamic.primitive_topology;
struct radeon_cmdbuf *cs = cmd_buffer->cs;
unsigned ia_multi_vgt_param;
si_get_ia_multi_vgt_param(cmd_buffer, instanced_draw,
indirect_draw,
count_from_stream_output,
- draw_vertex_count);
+ draw_vertex_count,
+ topology);
if (state->last_ia_multi_vgt_param != ia_multi_vgt_param) {
if (info->chip_class == GFX9) {
}
state->attachments[i].current_layout = att->initial_layout;
+ state->attachments[i].current_in_render_loop = false;
state->attachments[i].current_stencil_layout = att->stencil_initial_layout;
state->attachments[i].sample_location.count = 0;
list_del(&cmd_buffer->pool_link);
list_addtail(&cmd_buffer->pool_link, &cmd_buffer->pool->free_cmd_buffers);
} else
- radv_cmd_buffer_destroy(cmd_buffer);
+ radv_destroy_cmd_buffer(cmd_buffer);
}
}
}
void radv_CmdBindVertexBuffers(
+ VkCommandBuffer commandBuffer,
+ uint32_t firstBinding,
+ uint32_t bindingCount,
+ const VkBuffer* pBuffers,
+ const VkDeviceSize* pOffsets)
+{
+ /* Legacy entrypoint: forward to the EXT variant with NULL
+ * pSizes/pStrides, which fall back to the whole-buffer range and
+ * the pipeline-specified stride.
+ */
+ radv_CmdBindVertexBuffers2EXT(commandBuffer, firstBinding,
+ bindingCount, pBuffers, pOffsets,
+ NULL, NULL);
+}
+
+void radv_CmdBindVertexBuffers2EXT(
VkCommandBuffer commandBuffer,
uint32_t firstBinding,
uint32_t bindingCount,
const VkBuffer* pBuffers,
- const VkDeviceSize* pOffsets)
+ const VkDeviceSize* pOffsets,
+ const VkDeviceSize* pSizes,
+ const VkDeviceSize* pStrides)
{
RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
struct radv_vertex_binding *vb = cmd_buffer->vertex_bindings;
for (uint32_t i = 0; i < bindingCount; i++) {
RADV_FROM_HANDLE(radv_buffer, buffer, pBuffers[i]);
uint32_t idx = firstBinding + i;
+ VkDeviceSize size = pSizes ? pSizes[i] : 0;
+ VkDeviceSize stride = pStrides ? pStrides[i] : 0;
+ /* pSizes and pStrides are optional. */
if (!changed &&
(vb[idx].buffer != buffer ||
- vb[idx].offset != pOffsets[i])) {
+ vb[idx].offset != pOffsets[i] ||
+ vb[idx].size != size ||
+ vb[idx].stride != stride)) {
changed = true;
}
vb[idx].buffer = buffer;
vb[idx].offset = pOffsets[i];
+ vb[idx].size = size;
+ vb[idx].stride = stride;
if (buffer) {
radv_cs_add_buffer(cmd_buffer->device->ws,
state->dirty |= RADV_CMD_DIRTY_DYNAMIC_FRONT_FACE;
}
+/* vkCmdSetPrimitiveTopologyEXT: record the dynamic topology (translated
+ * to the hardware enum by si_translate_prim) and mark it dirty, with an
+ * early-out when the value is unchanged.
+ */
+void radv_CmdSetPrimitiveTopologyEXT(
+ VkCommandBuffer commandBuffer,
+ VkPrimitiveTopology primitiveTopology)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+ unsigned primitive_topology = si_translate_prim(primitiveTopology);
+
+ if (state->dynamic.primitive_topology == primitive_topology)
+ return;
+
+ state->dynamic.primitive_topology = primitive_topology;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY;
+}
+
+/* vkCmdSetViewportWithCountEXT: the dynamic count is folded into the
+ * regular viewport state, so simply reuse the existing entrypoint.
+ */
+void radv_CmdSetViewportWithCountEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t viewportCount,
+ const VkViewport* pViewports)
+{
+ radv_CmdSetViewport(commandBuffer, 0, viewportCount, pViewports);
+}
+
+/* vkCmdSetScissorWithCountEXT: the dynamic count is folded into the
+ * regular scissor state, so simply reuse the existing entrypoint.
+ */
+void radv_CmdSetScissorWithCountEXT(
+ VkCommandBuffer commandBuffer,
+ uint32_t scissorCount,
+ const VkRect2D* pScissors)
+{
+ radv_CmdSetScissor(commandBuffer, 0, scissorCount, pScissors);
+}
+
+/* vkCmdSetDepthTestEnableEXT: record the dynamic depth-test enable and
+ * mark it dirty; no-op when the value is unchanged.
+ */
+void radv_CmdSetDepthTestEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthTestEnable)
+
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+
+ if (state->dynamic.depth_test_enable == depthTestEnable)
+ return;
+
+ state->dynamic.depth_test_enable = depthTestEnable;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE;
+}
+
+/* vkCmdSetDepthWriteEnableEXT: record the dynamic depth-write enable
+ * and mark it dirty; no-op when the value is unchanged.
+ */
+void radv_CmdSetDepthWriteEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthWriteEnable)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+
+ if (state->dynamic.depth_write_enable == depthWriteEnable)
+ return;
+
+ state->dynamic.depth_write_enable = depthWriteEnable;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE;
+}
+
+/* vkCmdSetDepthCompareOpEXT: record the dynamic depth compare op and
+ * mark it dirty; no-op when the value is unchanged.
+ */
+void radv_CmdSetDepthCompareOpEXT(
+ VkCommandBuffer commandBuffer,
+ VkCompareOp depthCompareOp)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+
+ if (state->dynamic.depth_compare_op == depthCompareOp)
+ return;
+
+ state->dynamic.depth_compare_op = depthCompareOp;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP;
+}
+
+/* vkCmdSetDepthBoundsTestEnableEXT: record the dynamic depth-bounds
+ * test enable and mark it dirty; no-op when the value is unchanged.
+ */
+void radv_CmdSetDepthBoundsTestEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 depthBoundsTestEnable)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+
+ if (state->dynamic.depth_bounds_test_enable == depthBoundsTestEnable)
+ return;
+
+ state->dynamic.depth_bounds_test_enable = depthBoundsTestEnable;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE;
+}
+
+/* vkCmdSetStencilTestEnableEXT: record the dynamic stencil-test enable
+ * and mark it dirty; no-op when the value is unchanged.
+ */
+void radv_CmdSetStencilTestEnableEXT(
+ VkCommandBuffer commandBuffer,
+ VkBool32 stencilTestEnable)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+
+ if (state->dynamic.stencil_test_enable == stencilTestEnable)
+ return;
+
+ state->dynamic.stencil_test_enable = stencilTestEnable;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE;
+}
+
+/* vkCmdSetStencilOpEXT: update the stencil ops for the face(s) selected
+ * by faceMask.  Early-out when every selected face already holds the
+ * requested ops, so unchanged state does not trigger a re-emit.
+ */
+void radv_CmdSetStencilOpEXT(
+ VkCommandBuffer commandBuffer,
+ VkStencilFaceFlags faceMask,
+ VkStencilOp failOp,
+ VkStencilOp passOp,
+ VkStencilOp depthFailOp,
+ VkCompareOp compareOp)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+ bool front_same =
+ state->dynamic.stencil_op.front.fail_op == failOp &&
+ state->dynamic.stencil_op.front.pass_op == passOp &&
+ state->dynamic.stencil_op.front.depth_fail_op == depthFailOp &&
+ state->dynamic.stencil_op.front.compare_op == compareOp;
+ bool back_same =
+ state->dynamic.stencil_op.back.fail_op == failOp &&
+ state->dynamic.stencil_op.back.pass_op == passOp &&
+ state->dynamic.stencil_op.back.depth_fail_op == depthFailOp &&
+ state->dynamic.stencil_op.back.compare_op == compareOp;
+
+ /* Only faces named in faceMask need to match for the early-out. */
+ if ((!(faceMask & VK_STENCIL_FACE_FRONT_BIT) || front_same) &&
+ (!(faceMask & VK_STENCIL_FACE_BACK_BIT) || back_same))
+ return;
+
+ if (faceMask & VK_STENCIL_FACE_FRONT_BIT) {
+ state->dynamic.stencil_op.front.fail_op = failOp;
+ state->dynamic.stencil_op.front.pass_op = passOp;
+ state->dynamic.stencil_op.front.depth_fail_op = depthFailOp;
+ state->dynamic.stencil_op.front.compare_op = compareOp;
+ }
+
+ if (faceMask & VK_STENCIL_FACE_BACK_BIT) {
+ state->dynamic.stencil_op.back.fail_op = failOp;
+ state->dynamic.stencil_op.back.pass_op = passOp;
+ state->dynamic.stencil_op.back.depth_fail_op = depthFailOp;
+ state->dynamic.stencil_op.back.compare_op = compareOp;
+ }
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_STENCIL_OP;
+}
+
void radv_CmdExecuteCommands(
VkCommandBuffer commandBuffer,
uint32_t commandBufferCount,
list_for_each_entry_safe(struct radv_cmd_buffer, cmd_buffer,
&pool->cmd_buffers, pool_link) {
- radv_cmd_buffer_destroy(cmd_buffer);
+ radv_destroy_cmd_buffer(cmd_buffer);
}
list_for_each_entry_safe(struct radv_cmd_buffer, cmd_buffer,
&pool->free_cmd_buffers, pool_link) {
- radv_cmd_buffer_destroy(cmd_buffer);
+ radv_destroy_cmd_buffer(cmd_buffer);
}
vk_object_base_finish(&pool->base);
list_for_each_entry_safe(struct radv_cmd_buffer, cmd_buffer,
&pool->free_cmd_buffers, pool_link) {
- radv_cmd_buffer_destroy(cmd_buffer);
+ radv_destroy_cmd_buffer(cmd_buffer);
}
}