vk: Unhardcode an argument to sizeof
[mesa.git] / src / vulkan / anv_device.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "anv_private.h"
#include "mesa/main/git_sha1.h"
#include "util/strtod.h"

static VkResult
anv_physical_device_init(struct anv_physical_device *device,
                         struct anv_instance *instance,
                         const char *path)
{
   int fd;

   fd = open(path, O_RDWR | O_CLOEXEC);
   if (fd < 0)
      return vk_error(VK_ERROR_UNAVAILABLE);

   device->instance = instance;
   device->path = path;

   device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
   if (!device->chipset_id)
      goto fail;

   device->name = brw_get_device_name(device->chipset_id);
   device->info = brw_get_device_info(device->chipset_id, -1);
   if (!device->info)
      goto fail;

   if (anv_gem_get_aperture(fd, &device->aperture_size) == -1)
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
      goto fail;

   close(fd);

   return VK_SUCCESS;

fail:
   close(fd);
   return vk_error(VK_ERROR_UNAVAILABLE);
}

static void *default_alloc(
    void*                                       pUserData,
    size_t                                      size,
    size_t                                      alignment,
    VkSystemAllocType                           allocType)
{
   return malloc(size);
}

static void default_free(
    void*                                       pUserData,
    void*                                       pMem)
{
   free(pMem);
}

static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};
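
/* Note: the default allocator above ignores the requested alignment and
 * relies on malloc's natural alignment (typically 16 bytes on 64-bit glibc),
 * which covers the 8-byte alignments this driver requests for its own
 * objects. An application supplying its own VkAllocCallbacks must honor the
 * alignment argument itself.
 */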

VkResult anv_CreateInstance(
    const VkInstanceCreateInfo*                 pCreateInfo,
    VkInstance*                                 pInstance)
{
   struct anv_instance *instance;
   const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
   void *user_data = NULL;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);

   if (pCreateInfo->pAllocCb) {
      alloc_callbacks = pCreateInfo->pAllocCb;
      user_data = pCreateInfo->pAllocCb->pUserData;
   }
   instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
                                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!instance)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   instance->pAllocUserData = alloc_callbacks->pUserData;
   instance->pfnAlloc = alloc_callbacks->pfnAlloc;
   instance->pfnFree = alloc_callbacks->pfnFree;
   instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
   instance->physicalDeviceCount = 0;

   _mesa_locale_init();

   VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));

   *pInstance = anv_instance_to_handle(instance);

   return VK_SUCCESS;
}

VkResult anv_DestroyInstance(
    VkInstance                                  _instance)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);

   VG(VALGRIND_DESTROY_MEMPOOL(instance));

   _mesa_locale_fini();

   instance->pfnFree(instance->pAllocUserData, instance);

   return VK_SUCCESS;
}

static void *
anv_instance_alloc(struct anv_instance *instance, size_t size,
                   size_t alignment, VkSystemAllocType allocType)
{
   void *mem = instance->pfnAlloc(instance->pAllocUserData,
                                  size, alignment, allocType);
   if (mem) {
      VALGRIND_MEMPOOL_ALLOC(instance, mem, size);
      VALGRIND_MAKE_MEM_UNDEFINED(mem, size);
   }
   return mem;
}

static void
anv_instance_free(struct anv_instance *instance, void *mem)
{
   if (mem == NULL)
      return;

   VALGRIND_MEMPOOL_FREE(instance, mem);

   instance->pfnFree(instance->pAllocUserData, mem);
}

VkResult anv_EnumeratePhysicalDevices(
    VkInstance                                  _instance,
    uint32_t*                                   pPhysicalDeviceCount,
    VkPhysicalDevice*                           pPhysicalDevices)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);
   VkResult result;

   if (instance->physicalDeviceCount == 0) {
      result = anv_physical_device_init(&instance->physicalDevice,
                                        instance, "/dev/dri/renderD128");
      if (result != VK_SUCCESS)
         return result;

      instance->physicalDeviceCount = 1;
   }

   /* pPhysicalDeviceCount is an out parameter if pPhysicalDevices is NULL;
    * otherwise it's an inout parameter.
    *
    * The Vulkan spec (git aaed022) says:
    *
    *    pPhysicalDeviceCount is a pointer to an unsigned integer variable
    *    that is initialized with the number of devices the application is
    *    prepared to receive handles to. pname:pPhysicalDevices is a pointer
    *    to an array of at least this many VkPhysicalDevice handles [...].
    *
    *    Upon success, if pPhysicalDevices is NULL, vkEnumeratePhysicalDevices
    *    overwrites the contents of the variable pointed to by
    *    pPhysicalDeviceCount with the number of physical devices in the
    *    instance; otherwise, vkEnumeratePhysicalDevices overwrites
    *    pPhysicalDeviceCount with the number of physical handles written to
    *    pPhysicalDevices.
    */
   if (!pPhysicalDevices) {
      *pPhysicalDeviceCount = instance->physicalDeviceCount;
   } else if (*pPhysicalDeviceCount >= 1) {
      pPhysicalDevices[0] = anv_physical_device_to_handle(&instance->physicalDevice);
      *pPhysicalDeviceCount = 1;
   } else {
      *pPhysicalDeviceCount = 0;
   }

   return VK_SUCCESS;
}
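
/* A sketch of the standard two-call enumeration idiom this implements, from
 * the application's point of view (error handling omitted):
 *
 *    uint32_t count = 0;
 *    vkEnumeratePhysicalDevices(instance, &count, NULL);      // query count
 *    VkPhysicalDevice *devices = malloc(count * sizeof(*devices));
 *    vkEnumeratePhysicalDevices(instance, &count, devices);   // fill array
 *
 * This driver currently exposes at most one device, opened lazily from the
 * hard-coded node /dev/dri/renderD128 above.
 */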

VkResult anv_GetPhysicalDeviceFeatures(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceFeatures*                   pFeatures)
{
   anv_finishme("Get correct values for PhysicalDeviceFeatures");

   *pFeatures = (VkPhysicalDeviceFeatures) {
      .robustBufferAccess = false,
      .fullDrawIndexUint32 = false,
      .imageCubeArray = false,
      .independentBlend = false,
      .geometryShader = true,
      .tessellationShader = false,
      .sampleRateShading = false,
      .dualSourceBlend = true,
      .logicOp = true,
      .instancedDrawIndirect = true,
      .depthClip = false,
      .depthBiasClamp = false,
      .fillModeNonSolid = true,
      .depthBounds = false,
      .wideLines = true,
      .largePoints = true,
      .textureCompressionETC2 = true,
      .textureCompressionASTC_LDR = true,
      .textureCompressionBC = true,
      .pipelineStatisticsQuery = true,
      .vertexSideEffects = false,
      .tessellationSideEffects = false,
      .geometrySideEffects = false,
      .fragmentSideEffects = false,
      .shaderTessellationPointSize = false,
      .shaderGeometryPointSize = true,
      .shaderTextureGatherExtended = true,
      .shaderStorageImageExtendedFormats = false,
      .shaderStorageImageMultisample = false,
      .shaderStorageBufferArrayConstantIndexing = false,
      .shaderStorageImageArrayConstantIndexing = false,
      .shaderUniformBufferArrayDynamicIndexing = true,
      .shaderSampledImageArrayDynamicIndexing = false,
      .shaderStorageBufferArrayDynamicIndexing = false,
      .shaderStorageImageArrayDynamicIndexing = false,
      .shaderClipDistance = false,
      .shaderCullDistance = false,
      .shaderFloat64 = false,
      .shaderInt64 = false,
      .shaderFloat16 = false,
      .shaderInt16 = false,
   };

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceLimits(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceLimits*                     pLimits)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
   const struct brw_device_info *devinfo = physical_device->info;

   anv_finishme("Get correct values for PhysicalDeviceLimits");

   *pLimits = (VkPhysicalDeviceLimits) {
      .maxImageDimension1D = (1 << 14),
      .maxImageDimension2D = (1 << 14),
      .maxImageDimension3D = (1 << 10),
      .maxImageDimensionCube = (1 << 14),
      .maxImageArrayLayers = (1 << 10),
      .maxTexelBufferSize = (1 << 14),
      .maxUniformBufferSize = UINT32_MAX,
      .maxStorageBufferSize = UINT32_MAX,
      .maxPushConstantsSize = 128,
      .maxMemoryAllocationCount = UINT32_MAX,
      .bufferImageGranularity = 64, /* A cache line */
      .maxBoundDescriptorSets = MAX_SETS,
      .maxDescriptorSets = UINT32_MAX,
      .maxPerStageDescriptorSamplers = 64,
      .maxPerStageDescriptorUniformBuffers = 64,
      .maxPerStageDescriptorStorageBuffers = 64,
      .maxPerStageDescriptorSampledImages = 64,
      .maxPerStageDescriptorStorageImages = 64,
      .maxDescriptorSetSamplers = 256,
      .maxDescriptorSetUniformBuffers = 256,
      .maxDescriptorSetStorageBuffers = 256,
      .maxDescriptorSetSampledImages = 256,
      .maxDescriptorSetStorageImages = 256,
      .maxVertexInputAttributes = 32,
      .maxVertexInputAttributeOffset = 256,
      .maxVertexInputBindingStride = 256,
      .maxVertexOutputComponents = 32,
      .maxTessGenLevel = 0,
      .maxTessPatchSize = 0,
      .maxTessControlPerVertexInputComponents = 0,
      .maxTessControlPerVertexOutputComponents = 0,
      .maxTessControlPerPatchOutputComponents = 0,
      .maxTessControlTotalOutputComponents = 0,
      .maxTessEvaluationInputComponents = 0,
      .maxTessEvaluationOutputComponents = 0,
      .maxGeometryShaderInvocations = 6,
      .maxGeometryInputComponents = 16,
      .maxGeometryOutputComponents = 16,
      .maxGeometryOutputVertices = 16,
      .maxGeometryTotalOutputComponents = 16,
      .maxFragmentInputComponents = 16,
      .maxFragmentOutputBuffers = 8,
      .maxFragmentDualSourceBuffers = 2,
      .maxFragmentCombinedOutputResources = 8,
      .maxComputeSharedMemorySize = 1024,
      .maxComputeWorkGroupCount = {
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
      },
      .maxComputeWorkGroupInvocations = 16 * devinfo->max_cs_threads,
      .maxComputeWorkGroupSize = {
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
      },
      .subPixelPrecisionBits = 4 /* FIXME */,
      .subTexelPrecisionBits = 4 /* FIXME */,
      .mipmapPrecisionBits = 4 /* FIXME */,
      .maxDrawIndexedIndexValue = UINT32_MAX,
      .maxDrawIndirectInstanceCount = UINT32_MAX,
      .primitiveRestartForPatches = UINT32_MAX,
      .maxSamplerLodBias = 16,
      .maxSamplerAnisotropy = 16,
      .maxViewports = 16,
      .maxDynamicViewportStates = UINT32_MAX,
      .maxViewportDimensions = { (1 << 14), (1 << 14) },
      .viewportBoundsRange = { -1.0, 1.0 }, /* FIXME */
      .viewportSubPixelBits = 13, /* We take a float? */
      .minMemoryMapAlignment = 64, /* A cache line */
      .minTexelBufferOffsetAlignment = 1,
      .minUniformBufferOffsetAlignment = 1,
      .minStorageBufferOffsetAlignment = 1,
      .minTexelOffset = 0, /* FIXME */
      .maxTexelOffset = 0, /* FIXME */
      .minTexelGatherOffset = 0, /* FIXME */
      .maxTexelGatherOffset = 0, /* FIXME */
      .minInterpolationOffset = 0, /* FIXME */
      .maxInterpolationOffset = 0, /* FIXME */
      .subPixelInterpolationOffsetBits = 0, /* FIXME */
      .maxFramebufferWidth = (1 << 14),
      .maxFramebufferHeight = (1 << 14),
      .maxFramebufferLayers = (1 << 10),
      .maxFramebufferColorSamples = 8,
      .maxFramebufferDepthSamples = 8,
      .maxFramebufferStencilSamples = 8,
      .maxColorAttachments = MAX_RTS,
      .maxSampledImageColorSamples = 8,
      .maxSampledImageDepthSamples = 8,
      .maxSampledImageIntegerSamples = 1,
      .maxStorageImageSamples = 1,
      .maxSampleMaskWords = 1,
      .timestampFrequency = 1000 * 1000 * 1000 / 80,
      .maxClipDistances = 0 /* FIXME */,
      .maxCullDistances = 0 /* FIXME */,
      .maxCombinedClipAndCullDistances = 0 /* FIXME */,
      .pointSizeRange = { 0.125, 255.875 },
      .lineWidthRange = { 0.0, 7.9921875 },
      .pointSizeGranularity = (1.0 / 8.0),
      .lineWidthGranularity = (1.0 / 128.0),
   };

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceProperties(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceProperties*                 pProperties)
{
   ANV_FROM_HANDLE(anv_physical_device, pdevice, physicalDevice);

   *pProperties = (VkPhysicalDeviceProperties) {
      .apiVersion = VK_MAKE_VERSION(0, 138, 1),
      .driverVersion = 1,
      .vendorId = 0x8086,
      .deviceId = pdevice->chipset_id,
      .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
   };

   strcpy(pProperties->deviceName, pdevice->name);
   snprintf((char *)pProperties->pipelineCacheUUID, VK_UUID_LENGTH,
            "anv-%s", MESA_GIT_SHA1 + 4);

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceQueueCount(
    VkPhysicalDevice                            physicalDevice,
    uint32_t*                                   pCount)
{
   *pCount = 1;

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceQueueProperties(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    count,
    VkPhysicalDeviceQueueProperties*            pQueueProperties)
{
   assert(count == 1);

   *pQueueProperties = (VkPhysicalDeviceQueueProperties) {
      .queueFlags = VK_QUEUE_GRAPHICS_BIT |
                    VK_QUEUE_COMPUTE_BIT |
                    VK_QUEUE_DMA_BIT,
      .queueCount = 1,
      .supportsTimestamps = true,
   };

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceMemoryProperties*           pMemoryProperties)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
   VkDeviceSize heap_size;

   /* Reserve some wiggle room for the driver by exposing only 75% of the
    * aperture to the heap.
    */
   heap_size = 3 * physical_device->aperture_size / 4;

   /* The property flags below are valid only for llc platforms. */
   pMemoryProperties->memoryTypeCount = 1;
   pMemoryProperties->memoryTypes[0] = (VkMemoryType) {
      .propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
      /* We expose exactly one heap, so its index must be 0. */
      .heapIndex = 0,
   };

   pMemoryProperties->memoryHeapCount = 1;
   pMemoryProperties->memoryHeaps[0] = (VkMemoryHeap) {
      .size = heap_size,
      .flags = VK_MEMORY_HEAP_HOST_LOCAL,
   };

   return VK_SUCCESS;
}
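
/* How an application is expected to consume the above, roughly: scan the
 * memoryTypeBits from a VkMemoryRequirements query against these types. A
 * minimal sketch (helper name hypothetical, error handling omitted):
 *
 *    static int32_t
 *    find_mem_type(const VkPhysicalDeviceMemoryProperties *props,
 *                  uint32_t type_bits, VkMemoryPropertyFlags wanted)
 *    {
 *       for (uint32_t i = 0; i < props->memoryTypeCount; i++) {
 *          if ((type_bits & (1u << i)) &&
 *              (props->memoryTypes[i].propertyFlags & wanted) == wanted)
 *             return i;
 *       }
 *       return -1;
 *    }
 *
 * Since this driver reports a single HOST_VISIBLE type, the scan always
 * lands on index 0.
 */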

PFN_vkVoidFunction anv_GetInstanceProcAddr(
    VkInstance                                  instance,
    const char*                                 pName)
{
   return anv_lookup_entrypoint(pName);
}

PFN_vkVoidFunction anv_GetDeviceProcAddr(
    VkDevice                                    device,
    const char*                                 pName)
{
   return anv_lookup_entrypoint(pName);
}

static VkResult
anv_queue_init(struct anv_device *device, struct anv_queue *queue)
{
   queue->device = device;
   queue->pool = &device->surface_state_pool;

   queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
   if (queue->completed_serial.map == NULL)
      return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *(uint32_t *)queue->completed_serial.map = 0;
   queue->next_serial = 1;

   return VK_SUCCESS;
}

static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   /* This gets torn down with the device so we only need to do this if
    * valgrind is present.
    */
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}

static void
anv_device_init_border_colors(struct anv_device *device)
{
   static const VkClearColorValue border_colors[] = {
      [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 0.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 1.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE] = { .f32 = { 1.0, 1.0, 1.0, 1.0 } },
      [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK] = { .u32 = { 0, 0, 0, 0 } },
      [VK_BORDER_COLOR_INT_OPAQUE_BLACK] = { .u32 = { 0, 0, 0, 1 } },
      [VK_BORDER_COLOR_INT_OPAQUE_WHITE] = { .u32 = { 1, 1, 1, 1 } },
   };

   device->border_colors =
      anv_state_pool_alloc(&device->dynamic_state_pool,
                           sizeof(border_colors), 32);
   memcpy(device->border_colors.map, border_colors, sizeof(border_colors));
}

VkResult anv_CreateDevice(
    VkPhysicalDevice                            physicalDevice,
    const VkDeviceCreateInfo*                   pCreateInfo,
    VkDevice*                                   pDevice)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
   struct anv_instance *instance = physical_device->instance;
   struct anv_device *device;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);

   device = anv_instance_alloc(instance, sizeof(*device), 8,
                               VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!device)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   device->instance = physical_device->instance;

   /* XXX(chadv): Can we dup() physicalDevice->fd here? */
   device->fd = open(physical_device->path, O_RDWR | O_CLOEXEC);
   if (device->fd == -1)
      goto fail_device;

   device->context_id = anv_gem_create_context(device);
   if (device->context_id == -1)
      goto fail_fd;

   anv_bo_pool_init(&device->batch_bo_pool, device, ANV_CMD_BUFFER_BATCH_SIZE);

   anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);

   anv_state_pool_init(&device->dynamic_state_pool,
                       &device->dynamic_state_block_pool);

   anv_block_pool_init(&device->instruction_block_pool, device, 2048);
   anv_block_pool_init(&device->surface_state_block_pool, device, 2048);

   anv_state_pool_init(&device->surface_state_pool,
                       &device->surface_state_block_pool);

   anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);

   device->info = *physical_device->info;

   device->compiler = anv_compiler_create(device);

   pthread_mutex_init(&device->mutex, NULL);

   anv_queue_init(device, &device->queue);

   anv_device_init_meta(device);

   anv_device_init_border_colors(device);

   *pDevice = anv_device_to_handle(device);

   return VK_SUCCESS;

fail_fd:
   close(device->fd);
fail_device:
   anv_device_free(device, device);

   return vk_error(VK_ERROR_UNAVAILABLE);
}

VkResult anv_DestroyDevice(
    VkDevice                                    _device)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   anv_compiler_destroy(device->compiler);

   anv_queue_finish(&device->queue);

   anv_device_finish_meta(device);

#ifdef HAVE_VALGRIND
   /* We only need to free these to prevent valgrind errors. The backing
    * BO will go away in a couple of lines so we don't actually leak.
    */
   anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
#endif

   anv_bo_pool_finish(&device->batch_bo_pool);
   anv_state_pool_finish(&device->dynamic_state_pool);
   anv_block_pool_finish(&device->dynamic_state_block_pool);
   anv_block_pool_finish(&device->instruction_block_pool);
   anv_state_pool_finish(&device->surface_state_pool);
   anv_block_pool_finish(&device->surface_state_block_pool);
   anv_block_pool_finish(&device->scratch_block_pool);

   close(device->fd);

   anv_instance_free(device->instance, device);

   return VK_SUCCESS;
}

static const VkExtensionProperties global_extensions[] = {
   {
      .extName = "VK_WSI_LunarG",
      .specVersion = 3
   }
};

VkResult anv_GetGlobalExtensionProperties(
    const char*                                 pLayerName,
    uint32_t*                                   pCount,
    VkExtensionProperties*                      pProperties)
{
   if (pProperties == NULL) {
      *pCount = ARRAY_SIZE(global_extensions);
      return VK_SUCCESS;
   }

   /* The caller must provide room for all of our extensions. */
   assert(*pCount >= ARRAY_SIZE(global_extensions));

   *pCount = ARRAY_SIZE(global_extensions);
   memcpy(pProperties, global_extensions, sizeof(global_extensions));

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceExtensionProperties(
    VkPhysicalDevice                            physicalDevice,
    const char*                                 pLayerName,
    uint32_t*                                   pCount,
    VkExtensionProperties*                      pProperties)
{
   if (pProperties == NULL) {
      *pCount = 0;
      return VK_SUCCESS;
   }

   /* None supported at this time */
   return vk_error(VK_ERROR_INVALID_EXTENSION);
}

VkResult anv_GetGlobalLayerProperties(
    uint32_t*                                   pCount,
    VkLayerProperties*                          pProperties)
{
   if (pProperties == NULL) {
      *pCount = 0;
      return VK_SUCCESS;
   }

   /* None supported at this time */
   return vk_error(VK_ERROR_INVALID_LAYER);
}

VkResult anv_GetPhysicalDeviceLayerProperties(
    VkPhysicalDevice                            physicalDevice,
    uint32_t*                                   pCount,
    VkLayerProperties*                          pProperties)
{
   if (pProperties == NULL) {
      *pCount = 0;
      return VK_SUCCESS;
   }

   /* None supported at this time */
   return vk_error(VK_ERROR_INVALID_LAYER);
}

VkResult anv_GetDeviceQueue(
    VkDevice                                    _device,
    uint32_t                                    queueNodeIndex,
    uint32_t                                    queueIndex,
    VkQueue*                                    pQueue)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(queueIndex == 0);

   *pQueue = anv_queue_to_handle(&device->queue);

   return VK_SUCCESS;
}

VkResult anv_QueueSubmit(
    VkQueue                                     _queue,
    uint32_t                                    cmdBufferCount,
    const VkCmdBuffer*                          pCmdBuffers,
    VkFence                                     _fence)
{
   ANV_FROM_HANDLE(anv_queue, queue, _queue);
   ANV_FROM_HANDLE(anv_fence, fence, _fence);
   struct anv_device *device = queue->device;
   int ret;

   for (uint32_t i = 0; i < cmdBufferCount; i++) {
      ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, pCmdBuffers[i]);

      assert(cmd_buffer->level == VK_CMD_BUFFER_LEVEL_PRIMARY);

      ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf2.execbuf);
      if (ret != 0)
         return vk_error(VK_ERROR_UNKNOWN);

      if (fence) {
         ret = anv_gem_execbuffer(device, &fence->execbuf);
         if (ret != 0)
            return vk_error(VK_ERROR_UNKNOWN);
      }

      for (uint32_t j = 0; j < cmd_buffer->execbuf2.bo_count; j++)
         cmd_buffer->execbuf2.bos[j]->offset = cmd_buffer->execbuf2.objects[j].offset;
   }

   return VK_SUCCESS;
}
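
/* Note on the offset copy-back above: with I915_EXEC_NO_RELOC, userspace
 * promises that the offsets in its exec objects match what it wrote into
 * the batch, and the kernel skips relocation processing for buffers it did
 * not move. Copying the offsets the kernel actually chose back into our BO
 * structs keeps that promise true, so subsequent submits can usually avoid
 * relocation work entirely.
 */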

VkResult anv_QueueWaitIdle(
    VkQueue                                     _queue)
{
   ANV_FROM_HANDLE(anv_queue, queue, _queue);

   return vkDeviceWaitIdle(anv_device_to_handle(queue->device));
}

VkResult anv_DeviceWaitIdle(
    VkDevice                                    _device)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_state state;
   struct anv_batch batch;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   struct anv_bo *bo = NULL;
   VkResult result;
   int64_t timeout;
   int ret;

   state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
   bo = &device->dynamic_state_pool.block_pool->bo;
   batch.start = batch.next = state.map;
   batch.end = state.map + 32;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   exec2_objects[0].handle = bo->gem_handle;
   exec2_objects[0].relocation_count = 0;
   exec2_objects[0].relocs_ptr = 0;
   exec2_objects[0].alignment = 0;
   exec2_objects[0].offset = bo->offset;
   exec2_objects[0].flags = 0;
   exec2_objects[0].rsvd1 = 0;
   exec2_objects[0].rsvd2 = 0;

   execbuf.buffers_ptr = (uintptr_t) exec2_objects;
   execbuf.buffer_count = 1;
   execbuf.batch_start_offset = state.offset;
   execbuf.batch_len = batch.next - state.map;
   execbuf.cliprects_ptr = 0;
   execbuf.num_cliprects = 0;
   execbuf.DR1 = 0;
   execbuf.DR4 = 0;

   execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   execbuf.rsvd1 = device->context_id;
   execbuf.rsvd2 = 0;

   ret = anv_gem_execbuffer(device, &execbuf);
   if (ret != 0) {
      result = vk_error(VK_ERROR_UNKNOWN);
      goto fail;
   }

   timeout = INT64_MAX;
   ret = anv_gem_wait(device, bo->gem_handle, &timeout);
   if (ret != 0) {
      result = vk_error(VK_ERROR_UNKNOWN);
      goto fail;
   }

   anv_state_pool_free(&device->dynamic_state_pool, state);

   return VK_SUCCESS;

fail:
   anv_state_pool_free(&device->dynamic_state_pool, state);

   return result;
}
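
/* The wait-idle trick above: submit a trivial batch (just
 * MI_BATCH_BUFFER_END padded with an MI_NOOP) on the same hardware context,
 * then block on it with I915_GEM_WAIT and an effectively infinite timeout
 * (INT64_MAX nanoseconds). Because the kernel executes batches on a given
 * context in submission order, the trivial batch retiring implies all
 * previously submitted work on the device has completed as well.
 */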

void *
anv_device_alloc(struct anv_device *device,
                 size_t size,
                 size_t alignment,
                 VkSystemAllocType allocType)
{
   return anv_instance_alloc(device->instance, size, alignment, allocType);
}

void
anv_device_free(struct anv_device *device,
                void *mem)
{
   anv_instance_free(device->instance, mem);
}

VkResult
anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
{
   bo->gem_handle = anv_gem_create(device, size);
   if (!bo->gem_handle)
      return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);

   bo->map = NULL;
   bo->index = 0;
   bo->offset = 0;
   bo->size = size;

   return VK_SUCCESS;
}
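
/* GEM handles are nonzero by design, so a zero return from anv_gem_create
 * (a thin wrapper around DRM_IOCTL_I915_GEM_CREATE) unambiguously signals
 * failure; that is what the !bo->gem_handle check above relies on.
 */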

VkResult anv_AllocMemory(
    VkDevice                                    _device,
    const VkMemoryAllocInfo*                    pAllocInfo,
    VkDeviceMemory*                             pMem)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_device_memory *mem;
   VkResult result;

   assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);

   if (pAllocInfo->memoryTypeIndex != 0) {
      /* We support exactly one memory heap. */
      return vk_error(VK_ERROR_INVALID_VALUE);
   }

   /* FINISHME: Fail if allocation request exceeds heap size. */

   mem = anv_device_alloc(device, sizeof(*mem), 8,
                          VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (mem == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
   if (result != VK_SUCCESS)
      goto fail;

   *pMem = anv_device_memory_to_handle(mem);

   return VK_SUCCESS;

fail:
   anv_device_free(device, mem);

   return result;
}

VkResult anv_FreeMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);

   if (mem->bo.map)
      anv_gem_munmap(mem->bo.map, mem->bo.size);

   if (mem->bo.gem_handle != 0)
      anv_gem_close(device, mem->bo.gem_handle);

   anv_device_free(device, mem);

   return VK_SUCCESS;
}

VkResult anv_MapMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem,
    VkDeviceSize                                offset,
    VkDeviceSize                                size,
    VkMemoryMapFlags                            flags,
    void**                                      ppData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);

   /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
    * takes a VkDeviceMemory pointer, it seems like only one map of the memory
    * at a time is valid. We could just mmap up front and return an offset
    * pointer here, but that may exhaust virtual memory on 32 bit
    * userspace. */

   mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
   mem->map_size = size;

   *ppData = mem->map;

   return VK_SUCCESS;
}

VkResult anv_UnmapMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);

   anv_gem_munmap(mem->map, mem->map_size);

   return VK_SUCCESS;
}

VkResult anv_FlushMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   /* clflush here for !llc platforms */

   return VK_SUCCESS;
}

VkResult anv_InvalidateMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   return anv_FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
}

VkResult anv_GetBufferMemoryRequirements(
    VkDevice                                    device,
    VkBuffer                                    _buffer,
    VkMemoryRequirements*                       pMemoryRequirements)
{
   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

   /* The Vulkan spec (git aaed022) says:
    *
    *    memoryTypeBits is a bitfield and contains one bit set for every
    *    supported memory type for the resource. The bit `1<<i` is set if and
    *    only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
    *    structure for the physical device is supported.
    *
    * We support exactly one memory type.
    */
   pMemoryRequirements->memoryTypeBits = 1;

   pMemoryRequirements->size = buffer->size;
   pMemoryRequirements->alignment = 16;

   return VK_SUCCESS;
}

VkResult anv_GetImageMemoryRequirements(
    VkDevice                                    device,
    VkImage                                     _image,
    VkMemoryRequirements*                       pMemoryRequirements)
{
   ANV_FROM_HANDLE(anv_image, image, _image);

   /* The Vulkan spec (git aaed022) says:
    *
    *    memoryTypeBits is a bitfield and contains one bit set for every
    *    supported memory type for the resource. The bit `1<<i` is set if and
    *    only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
    *    structure for the physical device is supported.
    *
    * We support exactly one memory type.
    */
   pMemoryRequirements->memoryTypeBits = 1;

   pMemoryRequirements->size = image->size;
   pMemoryRequirements->alignment = image->alignment;

   return VK_SUCCESS;
}

VkResult anv_GetImageSparseMemoryRequirements(
    VkDevice                                    device,
    VkImage                                     image,
    uint32_t*                                   pNumRequirements,
    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
{
   return vk_error(VK_UNSUPPORTED);
}

VkResult anv_GetDeviceMemoryCommitment(
    VkDevice                                    device,
    VkDeviceMemory                              memory,
    VkDeviceSize*                               pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
   stub_return(VK_SUCCESS);
}

VkResult anv_BindBufferMemory(
    VkDevice                                    device,
    VkBuffer                                    _buffer,
    VkDeviceMemory                              _mem,
    VkDeviceSize                                memOffset)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

   buffer->bo = &mem->bo;
   buffer->offset = memOffset;

   return VK_SUCCESS;
}

VkResult anv_BindImageMemory(
    VkDevice                                    device,
    VkImage                                     _image,
    VkDeviceMemory                              _mem,
    VkDeviceSize                                memOffset)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
   ANV_FROM_HANDLE(anv_image, image, _image);

   image->bo = &mem->bo;
   image->offset = memOffset;

   return VK_SUCCESS;
}

VkResult anv_QueueBindSparseBufferMemory(
    VkQueue                                     queue,
    VkBuffer                                    buffer,
    uint32_t                                    numBindings,
    const VkSparseMemoryBindInfo*               pBindInfo)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueBindSparseImageOpaqueMemory(
    VkQueue                                     queue,
    VkImage                                     image,
    uint32_t                                    numBindings,
    const VkSparseMemoryBindInfo*               pBindInfo)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueBindSparseImageMemory(
    VkQueue                                     queue,
    VkImage                                     image,
    uint32_t                                    numBindings,
    const VkSparseImageMemoryBindInfo*          pBindInfo)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_CreateFence(
    VkDevice                                    _device,
    const VkFenceCreateInfo*                    pCreateInfo,
    VkFence*                                    pFence)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_fence *fence;
   struct anv_batch batch;
   VkResult result;

   const uint32_t fence_size = 128;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);

   fence = anv_device_alloc(device, sizeof(*fence), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (fence == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   result = anv_bo_init_new(&fence->bo, device, fence_size);
   if (result != VK_SUCCESS)
      goto fail;

   fence->bo.map =
      anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
   batch.next = batch.start = fence->bo.map;
   batch.end = fence->bo.map + fence->bo.size;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   fence->exec2_objects[0].handle = fence->bo.gem_handle;
   fence->exec2_objects[0].relocation_count = 0;
   fence->exec2_objects[0].relocs_ptr = 0;
   fence->exec2_objects[0].alignment = 0;
   fence->exec2_objects[0].offset = fence->bo.offset;
   fence->exec2_objects[0].flags = 0;
   fence->exec2_objects[0].rsvd1 = 0;
   fence->exec2_objects[0].rsvd2 = 0;

   fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
   fence->execbuf.buffer_count = 1;
   fence->execbuf.batch_start_offset = 0;
   fence->execbuf.batch_len = batch.next - fence->bo.map;
   fence->execbuf.cliprects_ptr = 0;
   fence->execbuf.num_cliprects = 0;
   fence->execbuf.DR1 = 0;
   fence->execbuf.DR4 = 0;

   fence->execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   fence->execbuf.rsvd1 = device->context_id;
   fence->execbuf.rsvd2 = 0;

   *pFence = anv_fence_to_handle(fence);

   return VK_SUCCESS;

fail:
   anv_device_free(device, fence);

   return result;
}

VkResult anv_DestroyFence(
    VkDevice                                    _device,
    VkFence                                     _fence)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_fence, fence, _fence);

   anv_gem_munmap(fence->bo.map, fence->bo.size);
   anv_gem_close(device, fence->bo.gem_handle);
   anv_device_free(device, fence);

   return VK_SUCCESS;
}

VkResult anv_ResetFences(
    VkDevice                                    _device,
    uint32_t                                    fenceCount,
    const VkFence*                              pFences)
{
   for (uint32_t i = 0; i < fenceCount; i++) {
      ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
      fence->ready = false;
   }

   return VK_SUCCESS;
}

VkResult anv_GetFenceStatus(
    VkDevice                                    _device,
    VkFence                                     _fence)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_fence, fence, _fence);
   int64_t t = 0;
   int ret;

   if (fence->ready)
      return VK_SUCCESS;

   ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
   if (ret == 0) {
      fence->ready = true;
      return VK_SUCCESS;
   }

   return VK_NOT_READY;
}
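
/* Fence status is polled by calling I915_GEM_WAIT with a zero timeout: the
 * ioctl returns 0 immediately if the fence's batch has retired and fails
 * with ETIME while the BO is still busy, which maps cleanly onto the
 * VK_SUCCESS / VK_NOT_READY split above.
 */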

VkResult anv_WaitForFences(
    VkDevice                                    _device,
    uint32_t                                    fenceCount,
    const VkFence*                              pFences,
    VkBool32                                    waitAll,
    uint64_t                                    timeout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   int64_t t = timeout;
   int ret;

   /* FIXME: handle !waitAll */

   for (uint32_t i = 0; i < fenceCount; i++) {
      ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
      ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
      if (ret == -1 && errno == ETIME)
         return VK_TIMEOUT;
      else if (ret == -1)
         return vk_error(VK_ERROR_UNKNOWN);
   }

   return VK_SUCCESS;
}

// Queue semaphore functions

VkResult anv_CreateSemaphore(
    VkDevice                                    device,
    const VkSemaphoreCreateInfo*                pCreateInfo,
    VkSemaphore*                                pSemaphore)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_DestroySemaphore(
    VkDevice                                    device,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueSignalSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueWaitSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}

// Event functions

VkResult anv_CreateEvent(
    VkDevice                                    device,
    const VkEventCreateInfo*                    pCreateInfo,
    VkEvent*                                    pEvent)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_DestroyEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_GetEventStatus(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_SetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_ResetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

// Buffer functions

VkResult anv_CreateBuffer(
    VkDevice                                    _device,
    const VkBufferCreateInfo*                   pCreateInfo,
    VkBuffer*                                   pBuffer)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_buffer *buffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);

   buffer = anv_device_alloc(device, sizeof(*buffer), 8,
                             VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (buffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   buffer->size = pCreateInfo->size;
   buffer->bo = NULL;
   buffer->offset = 0;

   *pBuffer = anv_buffer_to_handle(buffer);

   return VK_SUCCESS;
}

VkResult anv_DestroyBuffer(
    VkDevice                                    _device,
    VkBuffer                                    _buffer)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

   anv_device_free(device, buffer);

   return VK_SUCCESS;
}

// Buffer view functions

void
anv_fill_buffer_surface_state(void *state, const struct anv_format *format,
                              uint32_t offset, uint32_t range)
{
   /* This assumes RGBA float format. */
   uint32_t stride = 4;
   uint32_t num_elements = range / stride;

   struct GEN8_RENDER_SURFACE_STATE surface_state = {
      .SurfaceType = SURFTYPE_BUFFER,
      .SurfaceArray = false,
      .SurfaceFormat = format->surface_format,
      .SurfaceVerticalAlignment = VALIGN4,
      .SurfaceHorizontalAlignment = HALIGN4,
      .TileMode = LINEAR,
      .VerticalLineStride = 0,
      .VerticalLineStrideOffset = 0,
      .SamplerL2BypassModeDisable = true,
      .RenderCacheReadWriteMode = WriteOnlyCache,
      .MemoryObjectControlState = GEN8_MOCS,
      .BaseMipLevel = 0.0,
      .SurfaceQPitch = 0,
      /* For buffer surfaces, Width/Height/Depth together hold the element
       * count minus one, split across the three fields.
       */
      .Height = ((num_elements - 1) >> 7) & 0x3fff,
      .Width = (num_elements - 1) & 0x7f,
      .Depth = ((num_elements - 1) >> 21) & 0x3f,
      .SurfacePitch = stride - 1,
      .MinimumArrayElement = 0,
      .NumberofMultisamples = MULTISAMPLECOUNT_1,
      .XOffset = 0,
      .YOffset = 0,
      .SurfaceMinLOD = 0,
      .MIPCountLOD = 0,
      .AuxiliarySurfaceMode = AUX_NONE,
      .RedClearColor = 0,
      .GreenClearColor = 0,
      .BlueClearColor = 0,
      .AlphaClearColor = 0,
      .ShaderChannelSelectRed = SCS_RED,
      .ShaderChannelSelectGreen = SCS_GREEN,
      .ShaderChannelSelectBlue = SCS_BLUE,
      .ShaderChannelSelectAlpha = SCS_ALPHA,
      .ResourceMinLOD = 0.0,
      /* FIXME: We assume that the image must be bound at this time. */
      .SurfaceBaseAddress = { NULL, offset },
   };

   GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
}
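
/* A worked example of the size encoding above: with range = 16384 and the
 * hard-coded stride of 4, num_elements = 4096, so the fields encode
 * num_elements - 1 = 4095 as Width = 127 (low 7 bits), Height = 31 (next
 * 14 bits), and Depth = 0. Note that a stride of 4 bytes matches a single
 * 32-bit channel rather than a full RGBA float texel; see the FIXME-style
 * comment at the top of the function.
 */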

VkResult anv_CreateBufferView(
    VkDevice                                    _device,
    const VkBufferViewCreateInfo*               pCreateInfo,
    VkBufferView*                               pView)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer, buffer, pCreateInfo->buffer);
   struct anv_buffer_view *bview;
   struct anv_surface_view *view;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);

   /* Allocate the full wrapper struct, not just the embedded view; this is
    * the "unhardcode an argument to sizeof" fix from the commit title.
    */
   bview = anv_device_alloc(device, sizeof(*bview), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (bview == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   view = &bview->view;
   view->bo = buffer->bo;
   view->offset = buffer->offset + pCreateInfo->offset;
   view->surface_state =
      anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
   view->format = anv_format_for_vk_format(pCreateInfo->format);
   view->range = pCreateInfo->range;

   anv_fill_buffer_surface_state(view->surface_state.map, view->format,
                                 view->offset, pCreateInfo->range);

   *pView = anv_buffer_view_to_handle(bview);

   return VK_SUCCESS;
}

VkResult anv_DestroyBufferView(
    VkDevice                                    _device,
    VkBufferView                                _bview)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer_view, bview, _bview);

   anv_surface_view_fini(device, &bview->view);
   anv_device_free(device, bview);

   return VK_SUCCESS;
}

// Sampler functions

VkResult anv_CreateSampler(
    VkDevice                                    _device,
    const VkSamplerCreateInfo*                  pCreateInfo,
    VkSampler*                                  pSampler)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_sampler *sampler;
   uint32_t mag_filter, min_filter, max_anisotropy;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = anv_device_alloc(device, sizeof(*sampler), 8,
                              VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!sampler)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   static const uint32_t vk_to_gen_tex_filter[] = {
      [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
      [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_mipmap_mode[] = {
      [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
      [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
      [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_tex_address[] = {
      [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
      [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
      [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
      [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
      [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
   };

   static const uint32_t vk_to_gen_compare_op[] = {
      [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
      [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
      [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
      [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
      [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
      [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
      [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
      [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
   };

   if (pCreateInfo->maxAnisotropy > 1) {
      mag_filter = MAPFILTER_ANISOTROPIC;
      min_filter = MAPFILTER_ANISOTROPIC;
      max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
   } else {
      mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
      min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
      max_anisotropy = RATIO21;
   }
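
   /* The MaximumAnisotropy field encodes the ratios 2:1 through 16:1 in
    * steps of two as the values 0 through 7, so (maxAnisotropy - 2) / 2
    * maps a requested 2x to RATIO 2:1, 4x to RATIO 4:1, and 16x to
    * RATIO 16:1.
    */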

   struct GEN8_SAMPLER_STATE sampler_state = {
      .SamplerDisable = false,
      .TextureBorderColorMode = DX10OGL,
      .LODPreClampMode = 0,
      .BaseMipLevel = 0.0,
      .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
      .MagModeFilter = mag_filter,
      .MinModeFilter = min_filter,
      .TextureLODBias = pCreateInfo->mipLodBias * 256,
      .AnisotropicAlgorithm = EWAApproximation,
      .MinLOD = pCreateInfo->minLod,
      .MaxLOD = pCreateInfo->maxLod,
      .ChromaKeyEnable = 0,
      .ChromaKeyIndex = 0,
      .ChromaKeyMode = 0,
      .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
      .CubeSurfaceControlMode = 0,

      .IndirectStatePointer =
         device->border_colors.offset +
         pCreateInfo->borderColor * sizeof(float) * 4,

      .LODClampMagnificationMode = MIPNONE,
      .MaximumAnisotropy = max_anisotropy,
      .RAddressMinFilterRoundingEnable = 0,
      .RAddressMagFilterRoundingEnable = 0,
      .VAddressMinFilterRoundingEnable = 0,
      .VAddressMagFilterRoundingEnable = 0,
      .UAddressMinFilterRoundingEnable = 0,
      .UAddressMagFilterRoundingEnable = 0,
      .TrilinearFilterQuality = 0,
      .NonnormalizedCoordinateEnable = 0,
      .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
      .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
      .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
   };

   GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);

   *pSampler = anv_sampler_to_handle(sampler);

   return VK_SUCCESS;
}

VkResult anv_DestroySampler(
    VkDevice                                    _device,
    VkSampler                                   _sampler)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_sampler, sampler, _sampler);

   anv_device_free(device, sampler);

   return VK_SUCCESS;
}

// Descriptor set functions

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t num_dynamic_buffers = 0;
   uint32_t count = 0;
   uint32_t stages = 0;
   uint32_t s;

   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            surface_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         num_dynamic_buffers += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      stages |= pCreateInfo->pBinding[i].stageFlags;
      count += pCreateInfo->pBinding[i].arraySize;
   }

   uint32_t sampler_total = 0;
   uint32_t surface_total = 0;
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      sampler_total += sampler_count[s];
      surface_total += surface_count[s];
   }

   size_t size = sizeof(*set_layout) +
      (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
   set_layout = anv_device_alloc(device, size, 8,
                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->num_dynamic_buffers = num_dynamic_buffers;
   set_layout->count = count;
   set_layout->shader_stages = stages;

   struct anv_descriptor_slot *p = set_layout->entries;
   struct anv_descriptor_slot *sampler[VK_SHADER_STAGE_NUM];
   struct anv_descriptor_slot *surface[VK_SHADER_STAGE_NUM];
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      set_layout->stage[s].surface_count = surface_count[s];
      set_layout->stage[s].surface_start = surface[s] = p;
      p += surface_count[s];
      set_layout->stage[s].sampler_count = sampler_count[s];
      set_layout->stage[s].sampler_start = sampler[s] = p;
      p += sampler_count[s];
   }

   uint32_t descriptor = 0;
   int8_t dynamic_slot = 0;
   bool is_dynamic;
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               sampler[s]->index = descriptor + j;
               sampler[s]->dynamic_slot = -1;
               sampler[s]++;
            }
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         is_dynamic = true;
         break;
      default:
         is_dynamic = false;
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               surface[s]->index = descriptor + j;
               if (is_dynamic)
                  surface[s]->dynamic_slot = dynamic_slot + j;
               else
                  surface[s]->dynamic_slot = -1;
               surface[s]++;
            }
         break;
      default:
         break;
      }

      if (is_dynamic)
         dynamic_slot += pCreateInfo->pBinding[i].arraySize;

      descriptor += pCreateInfo->pBinding[i].arraySize;
   }

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
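
/* The layout built above is a single allocation: the fixed header followed
 * by the per-stage slot arrays, laid out stage by stage as
 *
 *    [stage 0 surfaces][stage 0 samplers][stage 1 surfaces][stage 1 samplers]...
 *
 * surface_start/sampler_start are interior pointers into that block, which
 * is why one anv_device_alloc of `size` covers everything and a single
 * anv_device_free in anv_DestroyDescriptorSetLayout releases it all.
 */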

VkResult anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   anv_device_free(device, set_layout);

   return VK_SUCCESS;
}

VkResult anv_CreateDescriptorPool(
    VkDevice                                    device,
    VkDescriptorPoolUsage                       poolUsage,
    uint32_t                                    maxSets,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    VkDescriptorPool*                           pDescriptorPool)
{
   anv_finishme("VkDescriptorPool is a stub");
   pDescriptorPool->handle = 1;
   return VK_SUCCESS;
}

VkResult anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool)
{
   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
   return VK_SUCCESS;
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    device,
    VkDescriptorPool                            descriptorPool)
{
   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
   return VK_SUCCESS;
}

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   size_t size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);

   set = anv_device_alloc(device, size, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* A descriptor set may not be 100% filled. Clear the set so we can
    * later detect holes in it.
    */
   memset(set, 0, size);

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_set *set)
{
   anv_device_free(device, set);
}

VkResult anv_AllocDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorSetUsage                        setUsage,
    uint32_t                                    count,
    const VkDescriptorSetLayout*                pSetLayouts,
    VkDescriptorSet*                            pDescriptorSets,
    uint32_t*                                   pCount)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   VkResult result;
   struct anv_descriptor_set *set;

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout, pSetLayouts[i]);

      result = anv_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS) {
         *pCount = i;
         return result;
      }

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   *pCount = count;

   return VK_SUCCESS;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      anv_descriptor_set_destroy(device, set);
   }

   return VK_SUCCESS;
}

VkResult anv_UpdateDescriptorSets(
    VkDevice                                    device,
    uint32_t                                    writeCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    copyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   for (uint32_t i = 0; i < writeCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->destSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->count; j++) {
            set->descriptors[write->destBinding + j].sampler =
               anv_sampler_from_handle(write->pDescriptors[j].sampler);
         }

         if (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
            break;

         /* fallthrough */

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         for (uint32_t j = 0; j < write->count; j++) {
            ANV_FROM_HANDLE(anv_image_view, iview,
                            write->pDescriptors[j].imageView);
            set->descriptors[write->destBinding + j].view = &iview->view;
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_finishme("texel buffers not implemented");
         break;

      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_finishme("input attachments not implemented");
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->count; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pDescriptors[j].bufferView);
            set->descriptors[write->destBinding + j].view = &bview->view;
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < copyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
1861 ANV_FROM_HANDLE(anv_descriptor_set, dest, copy->destSet);
1862 for (uint32_t j = 0; j < copy->count; j++) {
1863 dest->descriptors[copy->destBinding + j] =
1864 src->descriptors[copy->srcBinding + j];
1865 }
1866 }
1867
1868 return VK_SUCCESS;
1869 }
1870
1871 // State object functions
1872
1873 static inline int64_t
1874 clamp_int64(int64_t x, int64_t min, int64_t max)
1875 {
1876 if (x < min)
1877 return min;
1878 else if (x < max)
1879 return x;
1880 else
1881 return max;
1882 }
1883
1884 VkResult anv_CreateDynamicViewportState(
1885 VkDevice _device,
1886 const VkDynamicViewportStateCreateInfo* pCreateInfo,
1887 VkDynamicViewportState* pState)
1888 {
1889 ANV_FROM_HANDLE(anv_device, device, _device);
1890 struct anv_dynamic_vp_state *state;
1891
1892 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO);
1893
1894 state = anv_device_alloc(device, sizeof(*state), 8,
1895 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1896 if (state == NULL)
1897 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1898
1899 unsigned count = pCreateInfo->viewportAndScissorCount;
1900 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
1901 count * 64, 64);
1902 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
1903 count * 8, 32);
1904 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
1905 count * 32, 32);
1906
1907 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
1908 const VkViewport *vp = &pCreateInfo->pViewports[i];
1909 const VkRect2D *s = &pCreateInfo->pScissors[i];
1910
      struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
         .ViewportMatrixElementm00 = vp->width / 2,
         .ViewportMatrixElementm11 = vp->height / 2,
         .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
         .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
         .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
         .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
         .XMinClipGuardband = -1.0f,
         .XMaxClipGuardband = 1.0f,
         .YMinClipGuardband = -1.0f,
         .YMaxClipGuardband = 1.0f,
         .XMinViewPort = vp->originX,
         .XMaxViewPort = vp->originX + vp->width - 1,
         .YMinViewPort = vp->originY,
         .YMaxViewPort = vp->originY + vp->height - 1,
      };

      struct GEN8_CC_VIEWPORT cc_viewport = {
         .MinimumDepth = vp->minDepth,
         .MaximumDepth = vp->maxDepth
      };

      /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
       * ymax < ymin for empty clips. In case clip x, y, width, and height
       * are all 0, the clamps below produce 0 for xmin, ymin, xmax, and
       * ymax, which isn't what we want. Just special-case empty clips and
       * produce a canonical empty clip.
       */
      static const struct GEN8_SCISSOR_RECT empty_scissor = {
         .ScissorRectangleYMin = 1,
         .ScissorRectangleXMin = 1,
         .ScissorRectangleYMax = 0,
         .ScissorRectangleXMax = 0
      };

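      /* Worked example of why the 64-bit math below matters: with
       * offset.y near INT32_MAX and a large extent.height, the sum
       * offset.y + extent.height - 1 would wrap if evaluated in 32 bits;
       * widened to 64 bits it merely exceeds max and clamps to 0xffff.
       */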
      const int max = 0xffff;
      struct GEN8_SCISSOR_RECT scissor = {
         /* Do this math using int64_t so overflow gets clamped correctly. */
         .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
         .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
         .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
         .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
      };

      GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
      GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 8, &cc_viewport);

      if (s->extent.width <= 0 || s->extent.height <= 0) {
         GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
      } else {
         GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
      }
   }

   *pState = anv_dynamic_vp_state_to_handle(state);

   return VK_SUCCESS;
}

VkResult anv_DestroyDynamicViewportState(
    VkDevice _device,
    VkDynamicViewportState _vp_state)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_dynamic_vp_state, vp_state, _vp_state);

   anv_state_pool_free(&device->dynamic_state_pool, vp_state->sf_clip_vp);
   anv_state_pool_free(&device->dynamic_state_pool, vp_state->cc_vp);
   anv_state_pool_free(&device->dynamic_state_pool, vp_state->scissor);

   anv_device_free(device, vp_state);

   return VK_SUCCESS;
}

VkResult anv_CreateDynamicRasterState(
    VkDevice _device,
    const VkDynamicRasterStateCreateInfo* pCreateInfo,
    VkDynamicRasterState* pState)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_dynamic_rs_state *state;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RASTER_STATE_CREATE_INFO);

   state = anv_device_alloc(device, sizeof(*state), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (state == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   struct GEN8_3DSTATE_SF sf = {
      GEN8_3DSTATE_SF_header,
      .LineWidth = pCreateInfo->lineWidth,
   };

   GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);

   bool enable_bias = pCreateInfo->depthBias != 0.0f ||
                      pCreateInfo->slopeScaledDepthBias != 0.0f;
   struct GEN8_3DSTATE_RASTER raster = {
      .GlobalDepthOffsetEnableSolid = enable_bias,
      .GlobalDepthOffsetEnableWireframe = enable_bias,
      .GlobalDepthOffsetEnablePoint = enable_bias,
      .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
      .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
      .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
   };

   GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);

   *pState = anv_dynamic_rs_state_to_handle(state);

   return VK_SUCCESS;
}

VkResult anv_DestroyDynamicRasterState(
    VkDevice _device,
    VkDynamicRasterState _rs_state)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_dynamic_rs_state, rs_state, _rs_state);

   anv_device_free(device, rs_state);

   return VK_SUCCESS;
}

VkResult anv_CreateDynamicColorBlendState(
    VkDevice _device,
    const VkDynamicColorBlendStateCreateInfo* pCreateInfo,
    VkDynamicColorBlendState* pState)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_dynamic_cb_state *state;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_COLOR_BLEND_STATE_CREATE_INFO);

   state = anv_device_alloc(device, sizeof(*state), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (state == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   struct GEN8_COLOR_CALC_STATE color_calc_state = {
      .BlendConstantColorRed = pCreateInfo->blendConst[0],
      .BlendConstantColorGreen = pCreateInfo->blendConst[1],
      .BlendConstantColorBlue = pCreateInfo->blendConst[2],
      .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
   };

   GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);

   *pState = anv_dynamic_cb_state_to_handle(state);

   return VK_SUCCESS;
}

VkResult anv_DestroyDynamicColorBlendState(
    VkDevice _device,
    VkDynamicColorBlendState _cb_state)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_dynamic_cb_state, cb_state, _cb_state);

   anv_device_free(device, cb_state);

   return VK_SUCCESS;
}

VkResult anv_CreateDynamicDepthStencilState(
    VkDevice _device,
    const VkDynamicDepthStencilStateCreateInfo* pCreateInfo,
    VkDynamicDepthStencilState* pState)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_dynamic_ds_state *state;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DEPTH_STENCIL_STATE_CREATE_INFO);

   state = anv_device_alloc(device, sizeof(*state), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (state == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
      GEN8_3DSTATE_WM_DEPTH_STENCIL_header,

      /* FIXME: Is deriving the write enable from a non-zero write mask
       * what we need to do here?
       */
      .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,

      .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
      .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,

      .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
      .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
   };

   GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
                                      &wm_depth_stencil);

   struct GEN8_COLOR_CALC_STATE color_calc_state = {
      .StencilReferenceValue = pCreateInfo->stencilFrontRef,
      .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
   };

   GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);

   *pState = anv_dynamic_ds_state_to_handle(state);

   return VK_SUCCESS;
}

VkResult anv_DestroyDynamicDepthStencilState(
    VkDevice _device,
    VkDynamicDepthStencilState _ds_state)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_dynamic_ds_state, ds_state, _ds_state);

   anv_device_free(device, ds_state);

   return VK_SUCCESS;
}

VkResult anv_CreateFramebuffer(
    VkDevice _device,
    const VkFramebufferCreateInfo* pCreateInfo,
    VkFramebuffer* pFramebuffer)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_framebuffer *framebuffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

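   /* The attachment pointers are stored in a variable-length array at the
    * end of anv_framebuffer, so allocate the struct and the array in one
    * block.
    */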
   size_t size = sizeof(*framebuffer) +
                 sizeof(struct anv_attachment_view *) * pCreateInfo->attachmentCount;
   framebuffer = anv_device_alloc(device, size, 8,
                                  VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (framebuffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->attachment_count = pCreateInfo->attachmentCount;
   for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
      ANV_FROM_HANDLE(anv_attachment_view, view,
                      pCreateInfo->pAttachments[i].view);

      framebuffer->attachments[i] = view;
   }

   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;

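   /* Create a default viewport/scissor state covering the entire
    * framebuffer; presumably this is for internal (meta) operations that
    * need a full-surface viewport rather than the application's dynamic
    * state.
    */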
   anv_CreateDynamicViewportState(anv_device_to_handle(device),
      &(VkDynamicViewportStateCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO,
         .viewportAndScissorCount = 1,
         .pViewports = (VkViewport[]) {
            {
               .originX = 0,
               .originY = 0,
               .width = pCreateInfo->width,
               .height = pCreateInfo->height,
               .minDepth = 0,
               .maxDepth = 1
            },
         },
         .pScissors = (VkRect2D[]) {
            { { 0, 0 },
              { pCreateInfo->width, pCreateInfo->height } },
         }
      },
      &framebuffer->vp_state);

   *pFramebuffer = anv_framebuffer_to_handle(framebuffer);

   return VK_SUCCESS;
}

VkResult anv_DestroyFramebuffer(
    VkDevice _device,
    VkFramebuffer _fb)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_framebuffer, fb, _fb);

   anv_DestroyDynamicViewportState(anv_device_to_handle(device),
                                   fb->vp_state);
   anv_device_free(device, fb);

   return VK_SUCCESS;
}

VkResult anv_CreateRenderPass(
    VkDevice _device,
    const VkRenderPassCreateInfo* pCreateInfo,
    VkRenderPass* pRenderPass)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_render_pass *pass;
   size_t size;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);

   size = sizeof(*pass) +
          pCreateInfo->subpassCount * sizeof(pass->subpasses[0]);
   pass = anv_device_alloc(device, size, 8,
                           VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (pass == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Clear the subpasses along with the parent pass. This is required
    * because each array member of anv_subpass must be a valid pointer if
    * not NULL.
    */
   memset(pass, 0, size);

   pass->attachment_count = pCreateInfo->attachmentCount;
   pass->subpass_count = pCreateInfo->subpassCount;

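   /* Unlike the subpasses, the attachment array is a separate allocation,
    * and anv_DestroyRenderPass frees it separately.
    */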
   size = pCreateInfo->attachmentCount * sizeof(*pass->attachments);
   pass->attachments = anv_device_alloc(device, size, 8,
                                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
      pass->attachments[i].format =
         anv_format_for_vk_format(pCreateInfo->pAttachments[i].format);
      pass->attachments[i].samples = pCreateInfo->pAttachments[i].samples;
      pass->attachments[i].load_op = pCreateInfo->pAttachments[i].loadOp;
      pass->attachments[i].stencil_load_op = pCreateInfo->pAttachments[i].stencilLoadOp;
      // pass->attachments[i].store_op = pCreateInfo->pAttachments[i].storeOp;
      // pass->attachments[i].stencil_store_op = pCreateInfo->pAttachments[i].stencilStoreOp;
   }

   for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
      const VkSubpassDescription *desc = &pCreateInfo->pSubpasses[i];
      struct anv_subpass *subpass = &pass->subpasses[i];

      subpass->input_count = desc->inputCount;
      subpass->color_count = desc->colorCount;

      if (desc->inputCount > 0) {
         subpass->input_attachments =
            anv_device_alloc(device, desc->inputCount * sizeof(uint32_t),
                             8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);

         for (uint32_t j = 0; j < desc->inputCount; j++) {
            subpass->input_attachments[j] =
               desc->inputAttachments[j].attachment;
         }
      }

      if (desc->colorCount > 0) {
         subpass->color_attachments =
            anv_device_alloc(device, desc->colorCount * sizeof(uint32_t),
                             8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);

         for (uint32_t j = 0; j < desc->colorCount; j++) {
            subpass->color_attachments[j] =
               desc->colorAttachments[j].attachment;
         }
      }

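      /* When present, the resolve attachment array parallels the color
       * attachments, hence the colorCount entries allocated here.
       */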
      if (desc->resolveAttachments) {
         subpass->resolve_attachments =
            anv_device_alloc(device, desc->colorCount * sizeof(uint32_t),
                             8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);

         for (uint32_t j = 0; j < desc->colorCount; j++) {
            subpass->resolve_attachments[j] =
               desc->resolveAttachments[j].attachment;
         }
      }

      subpass->depth_stencil_attachment = desc->depthStencilAttachment.attachment;
   }

   *pRenderPass = anv_render_pass_to_handle(pass);

   return VK_SUCCESS;
}

VkResult anv_DestroyRenderPass(
    VkDevice _device,
    VkRenderPass _pass)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_render_pass, pass, _pass);

   anv_device_free(device, pass->attachments);

   for (uint32_t i = 0; i < pass->subpass_count; i++) {
      /* In VkSubpassDescription, each of the attachment arrays may be NULL,
       * in which case the corresponding pointers here are still NULL from
       * the memset at creation time.
       */
      struct anv_subpass *subpass = &pass->subpasses[i];

      anv_device_free(device, subpass->input_attachments);
      anv_device_free(device, subpass->color_attachments);
      anv_device_free(device, subpass->resolve_attachments);
   }

   anv_device_free(device, pass);

   return VK_SUCCESS;
}

VkResult anv_GetRenderAreaGranularity(
    VkDevice device,
    VkRenderPass renderPass,
    VkExtent2D* pGranularity)
{
   *pGranularity = (VkExtent2D) { 1, 1 };

   return VK_SUCCESS;
}

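/* Stub entry points for the debug-marker commands. They are declared with
 * default visibility, presumably so loaders can resolve them directly even
 * though they are currently no-ops.
 */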
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
   __attribute__ ((visibility ("default")));

void vkCmdDbgMarkerEnd(
    VkCmdBuffer cmdBuffer)
   __attribute__ ((visibility ("default")));

void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
{
}

void vkCmdDbgMarkerEnd(
    VkCmdBuffer cmdBuffer)
{
}