zink: move drawing code to a separate source file
[mesa.git] / src/gallium/drivers/zink/zink_draw.c
#include "zink_compiler.h"
#include "zink_context.h"
#include "zink_program.h"
#include "zink_resource.h"
#include "zink_screen.h"
#include "zink_state.h"

#include "indices/u_primconvert.h"
#include "util/hash_table.h"
#include "util/u_debug.h"
#include "util/u_helpers.h"
#include "util/u_inlines.h"
#include "util/u_prim.h"

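/* Carve a descriptor set for the given program out of the batch's descriptor
 * pool. The caller is responsible for making sure the pool still has enough
 * descriptors left; the remaining budget is tracked in batch->descs_left.
 */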
static VkDescriptorSet
allocate_descriptor_set(struct zink_screen *screen,
                        struct zink_batch *batch,
                        struct zink_gfx_program *prog)
{
   assert(batch->descs_left >= prog->num_descriptors);
   VkDescriptorSetAllocateInfo dsai;
   memset((void *)&dsai, 0, sizeof(dsai));
   dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
   dsai.pNext = NULL;
   dsai.descriptorPool = batch->descpool;
   dsai.descriptorSetCount = 1;
   dsai.pSetLayouts = &prog->dsl;

   VkDescriptorSet desc_set;
   if (vkAllocateDescriptorSets(screen->dev, &dsai, &desc_set) != VK_SUCCESS) {
      debug_printf("ZINK: failed to allocate descriptor set :/\n");
      return VK_NULL_HANDLE;
   }

   batch->descs_left -= prog->num_descriptors;
   return desc_set;
}

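/* Gather the VkBuffer handle and offset for every vertex-buffer binding in
 * the current vertex-elements state, keep the backing resources alive for
 * the lifetime of the batch, and bind them with a single
 * vkCmdBindVertexBuffers call.
 */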
static void
zink_bind_vertex_buffers(struct zink_batch *batch, struct zink_context *ctx)
{
   VkBuffer buffers[PIPE_MAX_ATTRIBS];
   VkDeviceSize buffer_offsets[PIPE_MAX_ATTRIBS];
   const struct zink_vertex_elements_state *elems = ctx->element_state;
   for (unsigned i = 0; i < elems->hw_state.num_bindings; i++) {
      struct pipe_vertex_buffer *vb = ctx->buffers + ctx->element_state->binding_map[i];
      assert(vb && vb->buffer.resource);
      struct zink_resource *res = zink_resource(vb->buffer.resource);
      buffers[i] = res->buffer;
      buffer_offsets[i] = vb->buffer_offset;
      zink_batch_reference_resoure(batch, res);
   }

   if (elems->hw_state.num_bindings > 0)
      vkCmdBindVertexBuffers(batch->cmdbuf, 0,
                             elems->hw_state.num_bindings,
                             buffers, buffer_offsets);
}

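/* Look up the graphics program for the currently bound shader stages,
 * creating and caching a new one in ctx->program_cache if this combination
 * of stages hasn't been seen before.
 */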
static struct zink_gfx_program *
get_gfx_program(struct zink_context *ctx)
{
   if (ctx->dirty_program) {
      struct hash_entry *entry = _mesa_hash_table_search(ctx->program_cache,
                                                         ctx->gfx_stages);
      if (!entry) {
         struct zink_gfx_program *prog;
         prog = zink_create_gfx_program(zink_screen(ctx->base.screen),
                                        ctx->gfx_stages);
         entry = _mesa_hash_table_insert(ctx->program_cache, prog->stages, prog);
         if (!entry)
            return NULL;
      }
      ctx->curr_program = entry->data;
      ctx->dirty_program = false;
   }

   assert(ctx->curr_program);
   return ctx->curr_program;
}

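/* The line width only needs to be programmed when lines actually get
 * rasterized: either the primitive itself reduces to lines, or triangles
 * are drawn with VK_POLYGON_MODE_LINE.
 */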
static bool
line_width_needed(enum pipe_prim_type reduced_prim,
                  VkPolygonMode polygon_mode)
{
   switch (reduced_prim) {
   case PIPE_PRIM_POINTS:
      return false;

   case PIPE_PRIM_LINES:
      return true;

   case PIPE_PRIM_TRIANGLES:
      return polygon_mode == VK_POLYGON_MODE_LINE;

   default:
      unreachable("unexpected reduced prim");
   }
}

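/* Main draw entry point: translates a pipe_draw_info into the required
 * Vulkan state setup, descriptor updates and finally a vkCmdDraw /
 * vkCmdDrawIndexed on the current batch.
 */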
void
zink_draw_vbo(struct pipe_context *pctx,
              const struct pipe_draw_info *dinfo)
{
   struct zink_context *ctx = zink_context(pctx);
   struct zink_screen *screen = zink_screen(pctx->screen);
   struct zink_rasterizer_state *rast_state = ctx->rast_state;

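   /* Vulkan has no native quads, line loops or 8-bit index buffers, so punt
    * those draws to u_primconvert.
    */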
   if (dinfo->mode >= PIPE_PRIM_QUADS ||
       dinfo->mode == PIPE_PRIM_LINE_LOOP ||
       dinfo->index_size == 1) {
      if (!u_trim_pipe_prim(dinfo->mode, (unsigned *)&dinfo->count))
         return;

      util_primconvert_save_rasterizer_state(ctx->primconvert, &rast_state->base);
      util_primconvert_draw_vbo(ctx->primconvert, dinfo);
      return;
   }

   struct zink_gfx_program *gfx_program = get_gfx_program(ctx);
   if (!gfx_program)
      return;

   VkPipeline pipeline = zink_get_gfx_pipeline(screen, gfx_program,
                                               &ctx->gfx_pipeline_state,
                                               dinfo->mode);

   enum pipe_prim_type reduced_prim = u_reduced_prim(dinfo->mode);

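   /* Pick the polygon-offset enable from the rasterizer state that matches
    * the class of primitive actually being rasterized.
    */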
   bool depth_bias = false;
   switch (reduced_prim) {
   case PIPE_PRIM_POINTS:
      depth_bias = rast_state->offset_point;
      break;

   case PIPE_PRIM_LINES:
      depth_bias = rast_state->offset_line;
      break;

   case PIPE_PRIM_TRIANGLES:
      depth_bias = rast_state->offset_tri;
      break;

   default:
      unreachable("unexpected reduced prim");
   }

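   /* For indexed draws with user-pointer indices, the indices have to be
    * uploaded into a real buffer first.
    */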
   unsigned index_offset = 0;
   struct pipe_resource *index_buffer = NULL;
   if (dinfo->index_size > 0) {
      if (dinfo->has_user_indices) {
         if (!util_upload_index_buffer(pctx, dinfo, &index_buffer, &index_offset)) {
            debug_printf("util_upload_index_buffer() failed\n");
            return;
         }
      } else
         index_buffer = dinfo->index.resource;
   }

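   /* Walk the bindings of every active shader stage and build one
    * VkWriteDescriptorSet per binding: UBOs get a VkDescriptorBufferInfo,
    * sampler views a VkDescriptorImageInfo. Images that aren't in a
    * shader-readable layout yet are queued up for a layout transition.
    */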
   VkWriteDescriptorSet wds[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS + PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   VkDescriptorBufferInfo buffer_infos[PIPE_SHADER_TYPES * PIPE_MAX_CONSTANT_BUFFERS];
   VkDescriptorImageInfo image_infos[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   int num_wds = 0, num_buffer_info = 0, num_image_info = 0;

   struct zink_resource *transitions[PIPE_SHADER_TYPES * PIPE_MAX_SHADER_SAMPLER_VIEWS];
   int num_transitions = 0;

   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            assert(ctx->ubos[i][index].buffer_size > 0);
            assert(ctx->ubos[i][index].buffer_size <= screen->props.limits.maxUniformBufferRange);
            assert(ctx->ubos[i][index].buffer);
            struct zink_resource *res = zink_resource(ctx->ubos[i][index].buffer);
            buffer_infos[num_buffer_info].buffer = res->buffer;
            buffer_infos[num_buffer_info].offset = ctx->ubos[i][index].buffer_offset;
            buffer_infos[num_buffer_info].range = ctx->ubos[i][index].buffer_size;
            wds[num_wds].pBufferInfo = buffer_infos + num_buffer_info;
            ++num_buffer_info;
         } else {
            struct pipe_sampler_view *psampler_view = ctx->image_views[i][index];
            assert(psampler_view);
            struct zink_sampler_view *sampler_view = zink_sampler_view(psampler_view);

            struct zink_resource *res = zink_resource(psampler_view->texture);
            VkImageLayout layout = res->layout;
            if (layout != VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL &&
                layout != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL &&
                layout != VK_IMAGE_LAYOUT_GENERAL) {
               transitions[num_transitions++] = res;
               layout = VK_IMAGE_LAYOUT_GENERAL;
            }
            image_infos[num_image_info].imageLayout = layout;
            image_infos[num_image_info].imageView = sampler_view->image_view;
            image_infos[num_image_info].sampler = ctx->samplers[i][index];
            wds[num_wds].pImageInfo = image_infos + num_image_info;
            ++num_image_info;
         }

         wds[num_wds].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
         wds[num_wds].pNext = NULL;
         wds[num_wds].dstBinding = shader->bindings[j].binding;
         wds[num_wds].dstArrayElement = 0;
         wds[num_wds].descriptorCount = 1;
         wds[num_wds].descriptorType = shader->bindings[j].type;
         ++num_wds;
      }
   }

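   /* Image layout transitions can't be recorded inside a render pass, so
    * emit them on a batch with the render pass ended, then (re)start the
    * render pass for the draw itself.
    */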
   struct zink_batch *batch;
   if (num_transitions > 0) {
      batch = zink_batch_no_rp(ctx);

      for (int i = 0; i < num_transitions; ++i)
         zink_resource_barrier(batch->cmdbuf, transitions[i],
                               transitions[i]->aspect,
                               VK_IMAGE_LAYOUT_GENERAL);
   }

   batch = zink_batch_rp(ctx);

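   /* If the batch's descriptor pool is running low, flush so we get a fresh
    * batch (and with it a fresh descriptor pool).
    */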
   if (batch->descs_left < gfx_program->num_descriptors) {
      ctx->base.flush(&ctx->base, NULL, 0);
      batch = zink_batch_rp(ctx);
      assert(batch->descs_left >= gfx_program->num_descriptors);
   }

   VkDescriptorSet desc_set = allocate_descriptor_set(screen, batch,
                                                      gfx_program);
   assert(desc_set != VK_NULL_HANDLE);

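   /* Reference every bound UBO and sampler view from the batch so the
    * resources stay alive until the GPU has finished using them.
    */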
   for (int i = 0; i < ARRAY_SIZE(ctx->gfx_stages); i++) {
      struct zink_shader *shader = ctx->gfx_stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings; j++) {
         int index = shader->bindings[j].index;
         if (shader->bindings[j].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
            struct zink_resource *res = zink_resource(ctx->ubos[i][index].buffer);
            zink_batch_reference_resoure(batch, res);
         } else {
            struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->image_views[i][index]);
            zink_batch_reference_sampler_view(batch, sampler_view);
         }
      }
   }

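   /* Viewport, scissor, line width, stencil reference, depth bias and blend
    * constants are all dynamic pipeline state, so they have to be set on the
    * command buffer for every draw.
    */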
   vkCmdSetViewport(batch->cmdbuf, 0, ctx->num_viewports, ctx->viewports);
   if (ctx->rast_state->base.scissor)
      vkCmdSetScissor(batch->cmdbuf, 0, ctx->num_viewports, ctx->scissors);
   else if (ctx->fb_state.width && ctx->fb_state.height) {
      VkRect2D fb_scissor = {};
      fb_scissor.extent.width = ctx->fb_state.width;
      fb_scissor.extent.height = ctx->fb_state.height;
      vkCmdSetScissor(batch->cmdbuf, 0, 1, &fb_scissor);
   }

   if (line_width_needed(reduced_prim, rast_state->hw_state.polygon_mode)) {
      if (screen->feats.wideLines || ctx->line_width == 1.0f)
         vkCmdSetLineWidth(batch->cmdbuf, ctx->line_width);
      else
         debug_printf("BUG: wide lines not supported, needs fallback!\n");
   }

   vkCmdSetStencilReference(batch->cmdbuf, VK_STENCIL_FACE_FRONT_BIT, ctx->stencil_ref.ref_value[0]);
   vkCmdSetStencilReference(batch->cmdbuf, VK_STENCIL_FACE_BACK_BIT, ctx->stencil_ref.ref_value[1]);

   if (depth_bias)
      vkCmdSetDepthBias(batch->cmdbuf, rast_state->offset_units, rast_state->offset_clamp, rast_state->offset_scale);
   else
      vkCmdSetDepthBias(batch->cmdbuf, 0.0f, 0.0f, 0.0f);

   if (ctx->gfx_pipeline_state.blend_state->need_blend_constants)
      vkCmdSetBlendConstants(batch->cmdbuf, ctx->blend_constants);

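   /* Write the collected descriptors into the freshly allocated set. */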
   if (num_wds > 0) {
      for (int i = 0; i < num_wds; ++i)
         wds[i].dstSet = desc_set;
      vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);
   }

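   /* Everything is in place: bind the pipeline, descriptor set and vertex
    * buffers, then emit the actual draw.
    */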
   vkCmdBindPipeline(batch->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
   vkCmdBindDescriptorSets(batch->cmdbuf, VK_PIPELINE_BIND_POINT_GRAPHICS,
                           gfx_program->layout, 0, 1, &desc_set, 0, NULL);
   zink_bind_vertex_buffers(batch, ctx);

   if (dinfo->index_size > 0) {
      assert(dinfo->index_size != 1);
      VkIndexType index_type = dinfo->index_size == 2 ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
      struct zink_resource *res = zink_resource(index_buffer);
      vkCmdBindIndexBuffer(batch->cmdbuf, res->buffer, index_offset, index_type);
      zink_batch_reference_resoure(batch, res);
      vkCmdDrawIndexed(batch->cmdbuf,
                       dinfo->count, dinfo->instance_count,
                       dinfo->start, dinfo->index_bias, dinfo->start_instance);
   } else
      vkCmdDraw(batch->cmdbuf, dinfo->count, dinfo->instance_count, dinfo->start, dinfo->start_instance);

   if (dinfo->index_size > 0 && dinfo->has_user_indices)
      pipe_resource_reference(&index_buffer, NULL);
}