unreachable("unexpected wrap");
}
+/* Translate a gallium pipe_compare_func into the corresponding
+ * VkCompareOp. The two enums describe the same eight comparison
+ * functions, so the mapping is one-to-one; any other value is a
+ * caller bug and falls through to unreachable().
+ */
+static VkCompareOp
+compare_op(enum pipe_compare_func op)
+{
+ switch (op) {
+ case PIPE_FUNC_NEVER: return VK_COMPARE_OP_NEVER;
+ case PIPE_FUNC_LESS: return VK_COMPARE_OP_LESS;
+ case PIPE_FUNC_EQUAL: return VK_COMPARE_OP_EQUAL;
+ case PIPE_FUNC_LEQUAL: return VK_COMPARE_OP_LESS_OR_EQUAL;
+ case PIPE_FUNC_GREATER: return VK_COMPARE_OP_GREATER;
+ case PIPE_FUNC_NOTEQUAL: return VK_COMPARE_OP_NOT_EQUAL;
+ case PIPE_FUNC_GEQUAL: return VK_COMPARE_OP_GREATER_OR_EQUAL;
+ case PIPE_FUNC_ALWAYS: return VK_COMPARE_OP_ALWAYS;
+ }
+ unreachable("unexpected compare");
+}
+
static void *
zink_create_sampler_state(struct pipe_context *pctx,
const struct pipe_sampler_state *state)
sci.addressModeV = sampler_address_mode(state->wrap_t);
sci.addressModeW = sampler_address_mode(state->wrap_r);
sci.mipLodBias = state->lod_bias;
- sci.compareOp = VK_COMPARE_OP_NEVER; // TODO
+
+ if (state->compare_mode == PIPE_TEX_COMPARE_NONE)
+ sci.compareOp = VK_COMPARE_OP_NEVER;
+ else
+ sci.compareOp = compare_op(state->compare_func);
+
sci.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // TODO
sci.unnormalizedCoordinates = !state->normalized_coords;
memcpy(ctx->blend_constants, color->color, sizeof(float) * 4);
}
+/* pipe_context::set_sample_mask hook: record the new sample mask in the
+ * cached graphics-pipeline state. The consumer of
+ * gfx_pipeline_state.sample_mask is outside this hunk -- presumably the
+ * pipeline-creation path picks it up at draw time.
+ */
+static void
+zink_set_sample_mask(struct pipe_context *pctx, unsigned sample_mask)
+{
+ struct zink_context *ctx = zink_context(pctx);
+ ctx->gfx_pipeline_state.sample_mask = sample_mask;
+}
+
static VkAccessFlags
access_flags(VkImageLayout layout)
{
if (ctx->num_scissors) {
for (unsigned i = 0 ; i < ctx->num_scissors; ++i) {
rects[i].rect = ctx->scissors[i];
+ rects[i].rect.extent.width = MIN2(rects[i].rect.extent.width,
+ fb->width);
+ rects[i].rect.extent.height = MIN2(rects[i].rect.extent.height,
+ fb->height);
rects[i].baseArrayLayer = 0;
rects[i].layerCount = num_layers;
}
num_rects = 1;
}
- if (!batch->rp)
- zink_begin_render_pass(ctx, batch);
-
vkCmdClearAttachments(batch->cmdbuf,
num_attachments, attachments,
num_rects, rects);
}
static VkDescriptorSet
-allocate_descriptor_set(struct zink_context *ctx, VkDescriptorSetLayout dsl)
+allocate_descriptor_set(struct zink_screen *screen,
+ struct zink_batch *batch,
+ struct zink_gfx_program *prog)
{
- struct zink_screen *screen = zink_screen(ctx->base.screen);
+ assert(batch->descs_left >= prog->num_descriptors);
VkDescriptorSetAllocateInfo dsai;
memset((void *)&dsai, 0, sizeof(dsai));
dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
dsai.pNext = NULL;
- dsai.descriptorPool = ctx->descpool;
+ dsai.descriptorPool = batch->descpool;
dsai.descriptorSetCount = 1;
- dsai.pSetLayouts = &dsl;
+ dsai.pSetLayouts = &prog->dsl;
VkDescriptorSet desc_set;
if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
-
- /* if we run out of descriptor sets we either need to create a bunch
- * more... or flush and wait. For simplicity, let's flush for now.
- */
- struct pipe_fence_handle *fence = NULL;
- ctx->base.flush(&ctx->base, &fence, 0);
- ctx->base.screen->fence_finish(ctx->base.screen, &ctx->base, fence,
- PIPE_TIMEOUT_INFINITE);
-
- if (vkResetDescriptorPool(screen->dev, ctx->descpool, 0) != VK_SUCCESS) {
- fprintf(stderr, "vkResetDescriptorPool failed\n");
- return VK_NULL_HANDLE;
- }
- if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
- fprintf(stderr, "vkAllocateDescriptorSets failed\n");
- return VK_NULL_HANDLE;
- }
+ debug_printf("ZINK: failed to allocate descriptor set :/");
+ return VK_NULL_HANDLE;
}
+ batch->descs_left -= prog->num_descriptors;
return desc_set;
}
VK_IMAGE_LAYOUT_GENERAL);
}
- VkDescriptorSet desc_set = allocate_descriptor_set(ctx, gfx_program->dsl);
-
batch = zink_batch_rp(ctx);
+ if (batch->descs_left < gfx_program->num_descriptors) {
+ flush_batch(ctx);
+ batch = zink_batch_rp(ctx);
+ assert(batch->descs_left >= gfx_program->num_descriptors);
+ }
+
+ VkDescriptorSet desc_set = allocate_descriptor_set(screen, batch,
+ gfx_program);
+ assert(desc_set != VK_NULL_HANDLE);
+
for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
struct zink_shader *shader = ctx->gfx_stages[i];
if (!shader)
ctx->base.set_active_query_state = zink_set_active_query_state;
ctx->base.set_blend_color = zink_set_blend_color;
+ ctx->base.set_sample_mask = zink_set_sample_mask;
+
ctx->base.clear = zink_clear;
ctx->base.draw_vbo = zink_draw_vbo;
ctx->base.flush = zink_flush;
cbai.commandPool = ctx->cmdpool;
cbai.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
cbai.commandBufferCount = 1;
+
+ VkDescriptorPoolSize sizes[] = {
+ {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, ZINK_BATCH_DESC_SIZE}
+ };
+ VkDescriptorPoolCreateInfo dpci = {};
+ dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+ dpci.pPoolSizes = sizes;
+ dpci.poolSizeCount = ARRAY_SIZE(sizes);
+ dpci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
+ dpci.maxSets = ZINK_BATCH_DESC_SIZE;
+
for (int i = 0; i < ARRAY_SIZE(ctx->batches); ++i) {
if (vkAllocateCommandBuffers(screen->dev, &cbai, &ctx->batches[i].cmdbuf) != VK_SUCCESS)
goto fail;
goto fail;
util_dynarray_init(&ctx->batches[i].zombie_samplers, NULL);
- }
- VkDescriptorPoolSize sizes[] = {
- {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1000}
- };
- VkDescriptorPoolCreateInfo dpci = {};
- dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
- dpci.pPoolSizes = sizes;
- dpci.poolSizeCount = ARRAY_SIZE(sizes);
- dpci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
- dpci.maxSets = 1000;
-
- if(vkCreateDescriptorPool(screen->dev, &dpci, 0, &ctx->descpool) != VK_SUCCESS)
- goto fail;
+ if (vkCreateDescriptorPool(screen->dev, &dpci, 0,
+ &ctx->batches[i].descpool) != VK_SUCCESS)
+ goto fail;
+ }
vkGetDeviceQueue(screen->dev, screen->gfx_queue, 0, &ctx->queue);