anv: move to using vk_alloc helpers.
src/intel/vulkan/anv_cmd_buffer.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "anv_private.h"

#include "vk_format_info.h"

/** \file anv_cmd_buffer.c
 *
 * This file contains all of the stuff for emitting commands into a command
 * buffer.  This includes implementations of most of the vkCmd*
 * entrypoints.  This file is concerned entirely with state emission and
 * not with the command buffer data structure itself.  As far as this file
 * is concerned, most of anv_cmd_buffer is magic.
 */

/* TODO: These are taken from GLES.  We should check the Vulkan spec */
const struct anv_dynamic_state default_dynamic_state = {
   .viewport = {
      .count = 0,
   },
   .scissor = {
      .count = 0,
   },
   .line_width = 1.0f,
   .depth_bias = {
      .bias = 0.0f,
      .clamp = 0.0f,
      .slope = 0.0f,
   },
   .blend_constants = { 0.0f, 0.0f, 0.0f, 0.0f },
   .depth_bounds = {
      .min = 0.0f,
      .max = 1.0f,
   },
   .stencil_compare_mask = {
      .front = ~0u,
      .back = ~0u,
   },
   .stencil_write_mask = {
      .front = ~0u,
      .back = ~0u,
   },
   .stencil_reference = {
      .front = 0u,
      .back = 0u,
   },
};

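/**
 * Copy the dynamic state selected by copy_mask from src to dest.
 *
 * Each bit in copy_mask corresponds to a VK_DYNAMIC_STATE_* enum value,
 * so a pipeline can hand over exactly the state it baked in and leave
 * everything else untouched.
 */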
void
anv_dynamic_state_copy(struct anv_dynamic_state *dest,
                       const struct anv_dynamic_state *src,
                       uint32_t copy_mask)
{
   if (copy_mask & (1 << VK_DYNAMIC_STATE_VIEWPORT)) {
      dest->viewport.count = src->viewport.count;
      typed_memcpy(dest->viewport.viewports, src->viewport.viewports,
                   src->viewport.count);
   }

   if (copy_mask & (1 << VK_DYNAMIC_STATE_SCISSOR)) {
      dest->scissor.count = src->scissor.count;
      typed_memcpy(dest->scissor.scissors, src->scissor.scissors,
                   src->scissor.count);
   }

   if (copy_mask & (1 << VK_DYNAMIC_STATE_LINE_WIDTH))
      dest->line_width = src->line_width;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_DEPTH_BIAS))
      dest->depth_bias = src->depth_bias;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_BLEND_CONSTANTS))
      typed_memcpy(dest->blend_constants, src->blend_constants, 4);

   if (copy_mask & (1 << VK_DYNAMIC_STATE_DEPTH_BOUNDS))
      dest->depth_bounds = src->depth_bounds;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK))
      dest->stencil_compare_mask = src->stencil_compare_mask;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_STENCIL_WRITE_MASK))
      dest->stencil_write_mask = src->stencil_write_mask;

   if (copy_mask & (1 << VK_DYNAMIC_STATE_STENCIL_REFERENCE))
      dest->stencil_reference = src->stencil_reference;
}

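/**
 * Put the command buffer's anv_cmd_state back into its default,
 * just-allocated condition: clear all cached bindings and dirty bits,
 * restore the default dynamic state, and free the per-render-pass
 * attachment array.
 */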
static void
anv_cmd_state_reset(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_cmd_state *state = &cmd_buffer->state;

   memset(&state->descriptors, 0, sizeof(state->descriptors));
   memset(&state->push_constants, 0, sizeof(state->push_constants));
   memset(state->binding_tables, 0, sizeof(state->binding_tables));
   memset(state->samplers, 0, sizeof(state->samplers));

   /* 0 isn't a valid config.  This ensures that we always configure L3$. */
   cmd_buffer->state.current_l3_config = 0;

   state->dirty = 0;
   state->vb_dirty = 0;
   state->pending_pipe_bits = 0;
   state->descriptors_dirty = 0;
   state->push_constants_dirty = 0;
   state->pipeline = NULL;
   state->push_constant_stages = 0;
   state->restart_index = UINT32_MAX;
   state->dynamic = default_dynamic_state;
   state->need_query_wa = true;

   if (state->attachments != NULL) {
      vk_free(&cmd_buffer->pool->alloc, state->attachments);
      state->attachments = NULL;
   }

   state->gen7.index_buffer = NULL;
}

/**
 * Setup anv_cmd_state::attachments for vkCmdBeginRenderPass.
 */
void
anv_cmd_state_setup_attachments(struct anv_cmd_buffer *cmd_buffer,
                                const VkRenderPassBeginInfo *info)
{
   struct anv_cmd_state *state = &cmd_buffer->state;
   ANV_FROM_HANDLE(anv_render_pass, pass, info->renderPass);

   vk_free(&cmd_buffer->pool->alloc, state->attachments);

   if (pass->attachment_count == 0) {
      state->attachments = NULL;
      return;
   }

   state->attachments = vk_alloc(&cmd_buffer->pool->alloc,
                                 pass->attachment_count *
                                 sizeof(state->attachments[0]),
                                 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (state->attachments == NULL) {
      /* FIXME: Propagate VK_ERROR_OUT_OF_HOST_MEMORY to vkEndCommandBuffer */
      abort();
   }

   for (uint32_t i = 0; i < pass->attachment_count; ++i) {
      struct anv_render_pass_attachment *att = &pass->attachments[i];
      VkImageAspectFlags att_aspects = vk_format_aspects(att->format);
      VkImageAspectFlags clear_aspects = 0;

      if (att_aspects == VK_IMAGE_ASPECT_COLOR_BIT) {
         /* color attachment */
         if (att->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            clear_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;
         }
      } else {
         /* depthstencil attachment */
         if ((att_aspects & VK_IMAGE_ASPECT_DEPTH_BIT) &&
             att->load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            clear_aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
         }
         if ((att_aspects & VK_IMAGE_ASPECT_STENCIL_BIT) &&
             att->stencil_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR) {
            clear_aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
         }
      }

      state->attachments[i].pending_clear_aspects = clear_aspects;
      if (clear_aspects) {
         assert(info->clearValueCount > i);
         state->attachments[i].clear_value = info->pClearValues[i];
      }
   }
}

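/**
 * Ensure the push constant block for the given stage is at least `size`
 * bytes.  The block is allocated lazily from the pool's allocator and
 * grown with vk_realloc, which preserves the existing contents.
 */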
VkResult
anv_cmd_buffer_ensure_push_constants_size(struct anv_cmd_buffer *cmd_buffer,
                                          gl_shader_stage stage, uint32_t size)
{
   struct anv_push_constants **ptr = &cmd_buffer->state.push_constants[stage];

   if (*ptr == NULL) {
      *ptr = vk_alloc(&cmd_buffer->pool->alloc, size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (*ptr == NULL)
         return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
   } else if ((*ptr)->size < size) {
      *ptr = vk_realloc(&cmd_buffer->pool->alloc, *ptr, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (*ptr == NULL)
         return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
   }
   (*ptr)->size = size;

   return VK_SUCCESS;
}

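/* Command buffers are allocated from their pool's allocator and linked
 * into the pool's cmd_buffers list, so destroying or resetting the pool
 * can sweep every command buffer it owns.
 */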
static VkResult anv_create_cmd_buffer(
    struct anv_device *                         device,
    struct anv_cmd_pool *                       pool,
    VkCommandBufferLevel                        level,
    VkCommandBuffer*                            pCommandBuffer)
{
   struct anv_cmd_buffer *cmd_buffer;
   VkResult result;

   cmd_buffer = vk_alloc(&pool->alloc, sizeof(*cmd_buffer), 8,
                         VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (cmd_buffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
   cmd_buffer->device = device;
   cmd_buffer->pool = pool;
   cmd_buffer->level = level;
   cmd_buffer->state.attachments = NULL;

   result = anv_cmd_buffer_init_batch_bo_chain(cmd_buffer);
   if (result != VK_SUCCESS)
      goto fail;

   anv_state_stream_init(&cmd_buffer->surface_state_stream,
                         &device->surface_state_block_pool);
   anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
                         &device->dynamic_state_block_pool);

   if (pool) {
      list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);
   } else {
      /* Init the pool_link so we can safely call list_del when we destroy
       * the command buffer
       */
      list_inithead(&cmd_buffer->pool_link);
   }

   *pCommandBuffer = anv_cmd_buffer_to_handle(cmd_buffer);

   return VK_SUCCESS;

 fail:
   vk_free(&cmd_buffer->pool->alloc, cmd_buffer);

   return result;
}

VkResult anv_AllocateCommandBuffers(
    VkDevice                                    _device,
    const VkCommandBufferAllocateInfo*          pAllocateInfo,
    VkCommandBuffer*                            pCommandBuffers)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_cmd_pool, pool, pAllocateInfo->commandPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      result = anv_create_cmd_buffer(device, pool, pAllocateInfo->level,
                                     &pCommandBuffers[i]);
      if (result != VK_SUCCESS)
         break;
   }

   if (result != VK_SUCCESS) {
      anv_FreeCommandBuffers(_device, pAllocateInfo->commandPool,
                             i, pCommandBuffers);
      /* The spec requires that, on failure, every entry of the output
       * array be set to NULL.
       */
      for (i = 0; i < pAllocateInfo->commandBufferCount; i++)
         pCommandBuffers[i] = VK_NULL_HANDLE;
   }

   return result;
}

static void
anv_cmd_buffer_destroy(struct anv_cmd_buffer *cmd_buffer)
{
   list_del(&cmd_buffer->pool_link);

   anv_cmd_buffer_fini_batch_bo_chain(cmd_buffer);

   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);

   vk_free(&cmd_buffer->pool->alloc, cmd_buffer->state.attachments);
   vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}

void anv_FreeCommandBuffers(
    VkDevice                                    device,
    VkCommandPool                               commandPool,
    uint32_t                                    commandBufferCount,
    const VkCommandBuffer*                      pCommandBuffers)
{
   for (uint32_t i = 0; i < commandBufferCount; i++) {
      ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, pCommandBuffers[i]);

      /* The spec allows elements of pCommandBuffers to be NULL; skip them. */
      if (!cmd_buffer)
         continue;

      anv_cmd_buffer_destroy(cmd_buffer);
   }
}

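/* Resetting throws away the old surface- and dynamic-state streams
 * wholesale (finish + re-init) instead of freeing individual
 * allocations; the streams are simple stream allocators on top of the
 * device's block pools.
 */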
VkResult
anv_cmd_buffer_reset(struct anv_cmd_buffer *cmd_buffer)
{
   cmd_buffer->usage_flags = 0;
   cmd_buffer->state.current_pipeline = UINT32_MAX;
   anv_cmd_buffer_reset_batch_bo_chain(cmd_buffer);
   anv_cmd_state_reset(cmd_buffer);

   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_init(&cmd_buffer->surface_state_stream,
                         &cmd_buffer->device->surface_state_block_pool);

   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
   anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
                         &cmd_buffer->device->dynamic_state_block_pool);
   return VK_SUCCESS;
}

VkResult anv_ResetCommandBuffer(
    VkCommandBuffer                             commandBuffer,
    VkCommandBufferResetFlags                   flags)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   return anv_cmd_buffer_reset(cmd_buffer);
}

void anv_CmdBindPipeline(
    VkCommandBuffer                             commandBuffer,
    VkPipelineBindPoint                         pipelineBindPoint,
    VkPipeline                                  _pipeline)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   ANV_FROM_HANDLE(anv_pipeline, pipeline, _pipeline);

   switch (pipelineBindPoint) {
   case VK_PIPELINE_BIND_POINT_COMPUTE:
      cmd_buffer->state.compute_pipeline = pipeline;
      cmd_buffer->state.compute_dirty |= ANV_CMD_DIRTY_PIPELINE;
      cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
      cmd_buffer->state.descriptors_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
      break;

   case VK_PIPELINE_BIND_POINT_GRAPHICS:
      cmd_buffer->state.pipeline = pipeline;
      cmd_buffer->state.vb_dirty |= pipeline->vb_used;
      cmd_buffer->state.dirty |= ANV_CMD_DIRTY_PIPELINE;
      cmd_buffer->state.push_constants_dirty |= pipeline->active_stages;
      cmd_buffer->state.descriptors_dirty |= pipeline->active_stages;

      /* Apply the dynamic state from the pipeline */
      cmd_buffer->state.dirty |= pipeline->dynamic_state_mask;
      anv_dynamic_state_copy(&cmd_buffer->state.dynamic,
                             &pipeline->dynamic_state,
                             pipeline->dynamic_state_mask);
      break;

   default:
      assert(!"invalid bind point");
      break;
   }
}

void anv_CmdSetViewport(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstViewport,
    uint32_t                                    viewportCount,
    const VkViewport*                           pViewports)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   const uint32_t total_count = firstViewport + viewportCount;
   if (cmd_buffer->state.dynamic.viewport.count < total_count)
      cmd_buffer->state.dynamic.viewport.count = total_count;

   memcpy(cmd_buffer->state.dynamic.viewport.viewports + firstViewport,
          pViewports, viewportCount * sizeof(*pViewports));

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_VIEWPORT;
}

void anv_CmdSetScissor(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstScissor,
    uint32_t                                    scissorCount,
    const VkRect2D*                             pScissors)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   const uint32_t total_count = firstScissor + scissorCount;
   if (cmd_buffer->state.dynamic.scissor.count < total_count)
      cmd_buffer->state.dynamic.scissor.count = total_count;

   memcpy(cmd_buffer->state.dynamic.scissor.scissors + firstScissor,
          pScissors, scissorCount * sizeof(*pScissors));

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_SCISSOR;
}

void anv_CmdSetLineWidth(
    VkCommandBuffer                             commandBuffer,
    float                                       lineWidth)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->state.dynamic.line_width = lineWidth;
   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH;
}

void anv_CmdSetDepthBias(
    VkCommandBuffer                             commandBuffer,
    float                                       depthBiasConstantFactor,
    float                                       depthBiasClamp,
    float                                       depthBiasSlopeFactor)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->state.dynamic.depth_bias.bias = depthBiasConstantFactor;
   cmd_buffer->state.dynamic.depth_bias.clamp = depthBiasClamp;
   cmd_buffer->state.dynamic.depth_bias.slope = depthBiasSlopeFactor;

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS;
}

void anv_CmdSetBlendConstants(
    VkCommandBuffer                             commandBuffer,
    const float                                 blendConstants[4])
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   memcpy(cmd_buffer->state.dynamic.blend_constants,
          blendConstants, sizeof(float) * 4);

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS;
}

void anv_CmdSetDepthBounds(
    VkCommandBuffer                             commandBuffer,
    float                                       minDepthBounds,
    float                                       maxDepthBounds)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   cmd_buffer->state.dynamic.depth_bounds.min = minDepthBounds;
   cmd_buffer->state.dynamic.depth_bounds.max = maxDepthBounds;

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS;
}

void anv_CmdSetStencilCompareMask(
    VkCommandBuffer                             commandBuffer,
    VkStencilFaceFlags                          faceMask,
    uint32_t                                    compareMask)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmd_buffer->state.dynamic.stencil_compare_mask.front = compareMask;
   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmd_buffer->state.dynamic.stencil_compare_mask.back = compareMask;

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK;
}

void anv_CmdSetStencilWriteMask(
    VkCommandBuffer                             commandBuffer,
    VkStencilFaceFlags                          faceMask,
    uint32_t                                    writeMask)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmd_buffer->state.dynamic.stencil_write_mask.front = writeMask;
   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmd_buffer->state.dynamic.stencil_write_mask.back = writeMask;

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK;
}

void anv_CmdSetStencilReference(
    VkCommandBuffer                             commandBuffer,
    VkStencilFaceFlags                          faceMask,
    uint32_t                                    reference)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   if (faceMask & VK_STENCIL_FACE_FRONT_BIT)
      cmd_buffer->state.dynamic.stencil_reference.front = reference;
   if (faceMask & VK_STENCIL_FACE_BACK_BIT)
      cmd_buffer->state.dynamic.stencil_reference.back = reference;

   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE;
}

void anv_CmdBindDescriptorSets(
    VkCommandBuffer                             commandBuffer,
    VkPipelineBindPoint                         pipelineBindPoint,
    VkPipelineLayout                            _layout,
    uint32_t                                    firstSet,
    uint32_t                                    descriptorSetCount,
    const VkDescriptorSet*                      pDescriptorSets,
    uint32_t                                    dynamicOffsetCount,
    const uint32_t*                             pDynamicOffsets)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   ANV_FROM_HANDLE(anv_pipeline_layout, layout, _layout);
   struct anv_descriptor_set_layout *set_layout;

   /* Binding descriptorSetCount sets starting at firstSet touches set
    * indices firstSet..firstSet + descriptorSetCount - 1, so the sum may
    * legitimately equal MAX_SETS.
    */
   assert(firstSet + descriptorSetCount <= MAX_SETS);

   for (uint32_t i = 0; i < descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
      set_layout = layout->set[firstSet + i].layout;

      if (cmd_buffer->state.descriptors[firstSet + i] != set) {
         cmd_buffer->state.descriptors[firstSet + i] = set;
         cmd_buffer->state.descriptors_dirty |= set_layout->shader_stages;
      }

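      /* Dynamic buffer offsets don't live in the descriptor set itself.
       * They are stashed in the push constant block of every stage that
       * uses this set, together with the range taken from the buffer
       * view, so the offsets can be applied when the buffers are
       * actually accessed.
       */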
      if (set_layout->dynamic_offset_count > 0) {
         anv_foreach_stage(s, set_layout->shader_stages) {
            anv_cmd_buffer_ensure_push_constant_field(cmd_buffer, s, dynamic);

            struct anv_push_constants *push =
               cmd_buffer->state.push_constants[s];

            unsigned d = layout->set[firstSet + i].dynamic_offset_start;
            const uint32_t *offsets = pDynamicOffsets;
            struct anv_descriptor *desc = set->descriptors;

            for (unsigned b = 0; b < set_layout->binding_count; b++) {
               if (set_layout->binding[b].dynamic_offset_index < 0)
                  continue;

               unsigned array_size = set_layout->binding[b].array_size;
               for (unsigned j = 0; j < array_size; j++) {
                  push->dynamic[d].offset = *(offsets++);
                  push->dynamic[d].range = (desc->buffer_view) ?
                                           desc->buffer_view->range : 0;
                  desc++;
                  d++;
               }
            }
         }
         cmd_buffer->state.push_constants_dirty |= set_layout->shader_stages;
      }
   }
}

void anv_CmdBindVertexBuffers(
    VkCommandBuffer                             commandBuffer,
    uint32_t                                    firstBinding,
    uint32_t                                    bindingCount,
    const VkBuffer*                             pBuffers,
    const VkDeviceSize*                         pOffsets)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
   struct anv_vertex_binding *vb = cmd_buffer->state.vertex_bindings;

   /* We have to defer setting up vertex buffers until we know the buffer
    * stride, which comes from the pipeline.
    */

   assert(firstBinding + bindingCount <= MAX_VBS);
   for (uint32_t i = 0; i < bindingCount; i++) {
      vb[firstBinding + i].buffer = anv_buffer_from_handle(pBuffers[i]);
      vb[firstBinding + i].offset = pOffsets[i];
      cmd_buffer->state.vb_dirty |= 1 << (firstBinding + i);
   }
}

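/* Uniform buffers get a typed RGBA32-float surface format; storage
 * buffers get an untyped (RAW) surface so arbitrary byte-addressed
 * access works.
 */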
enum isl_format
anv_isl_format_for_descriptor_type(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      return ISL_FORMAT_R32G32B32A32_FLOAT;

   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return ISL_FORMAT_RAW;

   default:
      unreachable("Invalid descriptor type");
   }
}

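/**
 * Copy `size` bytes of `data` into freshly allocated dynamic state and
 * return it.  On platforms without LLC the CPU cache is flushed
 * explicitly so the GPU observes the new data.
 */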
struct anv_state
anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
                            const void *data, uint32_t size, uint32_t alignment)
{
   struct anv_state state;

   state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, size, alignment);
   memcpy(state.map, data, size);

   if (!cmd_buffer->device->info.has_llc)
      anv_state_clflush(state);

   VG(VALGRIND_CHECK_MEM_IS_DEFINED(state.map, size));

   return state;
}

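/**
 * Allocate dynamic state and fill it with the bitwise OR of two DWORD
 * arrays.  This is the usual way a hardware packet that was partially
 * packed at pipeline-creation time gets merged with its dynamically
 * packed half.  An illustrative (hypothetical) caller:
 *
 *    anv_cmd_buffer_merge_dynamic(cmd_buffer, pipeline_sf_dwords,
 *                                 dynamic_sf_dwords, sf_packet_length, 32);
 */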
struct anv_state
anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
                             uint32_t *a, uint32_t *b,
                             uint32_t dwords, uint32_t alignment)
{
   struct anv_state state;
   uint32_t *p;

   state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                              dwords * 4, alignment);
   p = state.map;
   for (uint32_t i = 0; i < dwords; i++)
      p[i] = a[i] | b[i];

   if (!cmd_buffer->device->info.has_llc)
      anv_state_clflush(state);

   VG(VALGRIND_CHECK_MEM_IS_DEFINED(p, dwords * 4));

   return state;
}

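/*
 * Gather the push constants for one graphics stage into dynamic state.
 * Each prog_data->param[] entry encodes a byte offset into the stage's
 * anv_push_constants block; the loop below resolves every entry into the
 * tightly packed buffer the hardware actually reads.
 */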
struct anv_state
anv_cmd_buffer_push_constants(struct anv_cmd_buffer *cmd_buffer,
                              gl_shader_stage stage)
{
   /* If we don't have this stage, bail. */
   if (!anv_pipeline_has_stage(cmd_buffer->state.pipeline, stage))
      return (struct anv_state) { .offset = 0 };

   struct anv_push_constants *data =
      cmd_buffer->state.push_constants[stage];
   const struct brw_stage_prog_data *prog_data =
      anv_shader_bin_get_prog_data(cmd_buffer->state.pipeline->shaders[stage]);

   /* If we don't actually have any push constants, bail. */
   if (data == NULL || prog_data == NULL || prog_data->nr_params == 0)
      return (struct anv_state) { .offset = 0 };

   struct anv_state state =
      anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                         prog_data->nr_params * sizeof(float),
                                         32 /* bottom 5 bits MBZ */);

   /* Walk through the param array and fill the buffer with data */
   uint32_t *u32_map = state.map;
   for (unsigned i = 0; i < prog_data->nr_params; i++) {
      uint32_t offset = (uintptr_t)prog_data->param[i];
      u32_map[i] = *(uint32_t *)((uint8_t *)data + offset);
   }

   if (!cmd_buffer->device->info.has_llc)
      anv_state_clflush(state);

   return state;
}

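/*
 * Compute push constants are laid out as a cross-thread section, shared
 * by all hardware threads, followed by a per-thread section that is
 * replicated once per thread with the thread-local ID slot patched to
 * t * simd_size.
 */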
struct anv_state
anv_cmd_buffer_cs_push_constants(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_push_constants *data =
      cmd_buffer->state.push_constants[MESA_SHADER_COMPUTE];
   struct anv_pipeline *pipeline = cmd_buffer->state.compute_pipeline;
   const struct brw_cs_prog_data *cs_prog_data = get_cs_prog_data(pipeline);
   const struct brw_stage_prog_data *prog_data = &cs_prog_data->base;

   /* If we don't actually have any push constants, bail. */
   if (cs_prog_data->push.total.size == 0)
      return (struct anv_state) { .offset = 0 };

   const unsigned push_constant_alignment =
      cmd_buffer->device->info.gen < 8 ? 32 : 64;
   const unsigned aligned_total_push_constants_size =
      ALIGN(cs_prog_data->push.total.size, push_constant_alignment);
   struct anv_state state =
      anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                         aligned_total_push_constants_size,
                                         push_constant_alignment);

   /* Walk through the param array and fill the buffer with data */
   uint32_t *u32_map = state.map;

   if (cs_prog_data->push.cross_thread.size > 0) {
      assert(cs_prog_data->thread_local_id_index < 0 ||
             cs_prog_data->thread_local_id_index >=
                cs_prog_data->push.cross_thread.dwords);
      for (unsigned i = 0;
           i < cs_prog_data->push.cross_thread.dwords;
           i++) {
         uint32_t offset = (uintptr_t)prog_data->param[i];
         u32_map[i] = *(uint32_t *)((uint8_t *)data + offset);
      }
   }

   if (cs_prog_data->push.per_thread.size > 0) {
      for (unsigned t = 0; t < cs_prog_data->threads; t++) {
         unsigned dst =
            8 * (cs_prog_data->push.per_thread.regs * t +
                 cs_prog_data->push.cross_thread.regs);
         unsigned src = cs_prog_data->push.cross_thread.dwords;
         for ( ; src < prog_data->nr_params; src++, dst++) {
            if (src != cs_prog_data->thread_local_id_index) {
               uint32_t offset = (uintptr_t)prog_data->param[src];
               u32_map[dst] = *(uint32_t *)((uint8_t *)data + offset);
            } else {
               u32_map[dst] = t * cs_prog_data->simd_size;
            }
         }
      }
   }

   if (!cmd_buffer->device->info.has_llc)
      anv_state_clflush(state);

   return state;
}

void anv_CmdPushConstants(
    VkCommandBuffer                             commandBuffer,
    VkPipelineLayout                            layout,
    VkShaderStageFlags                          stageFlags,
    uint32_t                                    offset,
    uint32_t                                    size,
    const void*                                 pValues)
{
   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);

   anv_foreach_stage(stage, stageFlags) {
      anv_cmd_buffer_ensure_push_constant_field(cmd_buffer, stage, client_data);

      memcpy(cmd_buffer->state.push_constants[stage]->client_data + offset,
             pValues, size);
   }

   cmd_buffer->state.push_constants_dirty |= stageFlags;
}

VkResult anv_CreateCommandPool(
    VkDevice                                    _device,
    const VkCommandPoolCreateInfo*              pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkCommandPool*                              pCmdPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_cmd_pool *pool;

   pool = vk_alloc2(&device->alloc, pAllocator, sizeof(*pool), 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (pool == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   if (pAllocator)
      pool->alloc = *pAllocator;
   else
      pool->alloc = device->alloc;

   list_inithead(&pool->cmd_buffers);

   *pCmdPool = anv_cmd_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyCommandPool(
    VkDevice                                    _device,
    VkCommandPool                               commandPool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_cmd_pool, pool, commandPool);

   list_for_each_entry_safe(struct anv_cmd_buffer, cmd_buffer,
                            &pool->cmd_buffers, pool_link) {
      anv_cmd_buffer_destroy(cmd_buffer);
   }

   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetCommandPool(
    VkDevice                                    device,
    VkCommandPool                               commandPool,
    VkCommandPoolResetFlags                     flags)
{
   ANV_FROM_HANDLE(anv_cmd_pool, pool, commandPool);

   list_for_each_entry(struct anv_cmd_buffer, cmd_buffer,
                       &pool->cmd_buffers, pool_link) {
      anv_cmd_buffer_reset(cmd_buffer);
   }

   return VK_SUCCESS;
}

/**
 * Return NULL if the current subpass has no depthstencil attachment.
 */
const struct anv_image_view *
anv_cmd_buffer_get_depth_stencil_view(const struct anv_cmd_buffer *cmd_buffer)
{
   const struct anv_subpass *subpass = cmd_buffer->state.subpass;
   const struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;

   if (subpass->depth_stencil_attachment == VK_ATTACHMENT_UNUSED)
      return NULL;

   const struct anv_image_view *iview =
      fb->attachments[subpass->depth_stencil_attachment];

   assert(iview->aspect_mask & (VK_IMAGE_ASPECT_DEPTH_BIT |
                                VK_IMAGE_ASPECT_STENCIL_BIT));

   return iview;
}