vk/device: Make BATCH_SIZE a global #define
[mesa.git] / src / vulkan / anv_device.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>

#include "anv_private.h"
#include "mesa/main/git_sha1.h"

static int
anv_env_get_int(const char *name)
{
   const char *val = getenv(name);

   if (!val)
      return 0;

   return strtol(val, NULL, 0);
}

static void
anv_physical_device_finish(struct anv_physical_device *device)
{
   if (device->fd >= 0)
      close(device->fd);
}

static VkResult
anv_physical_device_init(struct anv_physical_device *device,
                         struct anv_instance *instance,
                         const char *path)
{
   device->fd = open(path, O_RDWR | O_CLOEXEC);
   if (device->fd < 0)
      return vk_error(VK_ERROR_UNAVAILABLE);

   device->instance = instance;
   device->path = path;

   device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
   device->no_hw = false;
   if (device->chipset_id) {
      /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
      device->no_hw = true;
   } else {
      device->chipset_id = anv_gem_get_param(device->fd, I915_PARAM_CHIPSET_ID);
   }
   if (!device->chipset_id)
      goto fail;

   device->name = brw_get_device_name(device->chipset_id);
   device->info = brw_get_device_info(device->chipset_id, -1);
   if (!device->info)
      goto fail;

   if (!anv_gem_get_param(device->fd, I915_PARAM_HAS_WAIT_TIMEOUT))
      goto fail;

   if (!anv_gem_get_param(device->fd, I915_PARAM_HAS_EXECBUF2))
      goto fail;

   if (!anv_gem_get_param(device->fd, I915_PARAM_HAS_LLC))
      goto fail;

   if (!anv_gem_get_param(device->fd, I915_PARAM_HAS_EXEC_CONSTANTS))
      goto fail;

   return VK_SUCCESS;

 fail:
   anv_physical_device_finish(device);
   return vk_error(VK_ERROR_UNAVAILABLE);
}

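/* Default allocation callbacks, used when the application does not supply
 * its own. Note that the alignment argument is quietly ignored: the driver
 * only requests 8-byte alignment for API objects here, which malloc can
 * reasonably be assumed to provide already.
 */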
static void *default_alloc(
    void*                                       pUserData,
    size_t                                      size,
    size_t                                      alignment,
    VkSystemAllocType                           allocType)
{
   return malloc(size);
}

static void default_free(
    void*                                       pUserData,
    void*                                       pMem)
{
   free(pMem);
}

static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};

VkResult anv_CreateInstance(
    const VkInstanceCreateInfo*                 pCreateInfo,
    VkInstance*                                 pInstance)
{
   struct anv_instance *instance;
   const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
   void *user_data = NULL;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);

   if (pCreateInfo->pAllocCb) {
      alloc_callbacks = pCreateInfo->pAllocCb;
      user_data = pCreateInfo->pAllocCb->pUserData;
   }
   instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
                                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!instance)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   instance->pAllocUserData = alloc_callbacks->pUserData;
   instance->pfnAlloc = alloc_callbacks->pfnAlloc;
   instance->pfnFree = alloc_callbacks->pfnFree;
   instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
   instance->physicalDeviceCount = 0;

   *pInstance = anv_instance_to_handle(instance);

   return VK_SUCCESS;
}

VkResult anv_DestroyInstance(
    VkInstance                                  _instance)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);

   if (instance->physicalDeviceCount > 0) {
      anv_physical_device_finish(&instance->physicalDevice);
   }

   instance->pfnFree(instance->pAllocUserData, instance);

   return VK_SUCCESS;
}

VkResult anv_EnumeratePhysicalDevices(
    VkInstance                                  _instance,
    uint32_t*                                   pPhysicalDeviceCount,
    VkPhysicalDevice*                           pPhysicalDevices)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);
   VkResult result;

   if (instance->physicalDeviceCount == 0) {
      result = anv_physical_device_init(&instance->physicalDevice,
                                        instance, "/dev/dri/renderD128");
      if (result != VK_SUCCESS)
         return result;

      instance->physicalDeviceCount = 1;
   }

   /* pPhysicalDeviceCount is an out parameter if pPhysicalDevices is NULL;
    * otherwise it's an inout parameter.
    *
    * The Vulkan spec (git aaed022) says:
    *
    *    pPhysicalDeviceCount is a pointer to an unsigned integer variable
    *    that is initialized with the number of devices the application is
    *    prepared to receive handles to. pname:pPhysicalDevices is pointer to
    *    an array of at least this many VkPhysicalDevice handles [...].
    *
    *    Upon success, if pPhysicalDevices is NULL, vkEnumeratePhysicalDevices
    *    overwrites the contents of the variable pointed to by
    *    pPhysicalDeviceCount with the number of physical devices in the
    *    instance; otherwise, vkEnumeratePhysicalDevices overwrites
    *    pPhysicalDeviceCount with the number of physical handles written to
    *    pPhysicalDevices.
    */
   if (!pPhysicalDevices) {
      *pPhysicalDeviceCount = instance->physicalDeviceCount;
   } else if (*pPhysicalDeviceCount >= 1) {
      pPhysicalDevices[0] = anv_physical_device_to_handle(&instance->physicalDevice);
      *pPhysicalDeviceCount = 1;
   } else {
      *pPhysicalDeviceCount = 0;
   }

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceFeatures(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceFeatures*                   pFeatures)
{
   anv_finishme("Get correct values for PhysicalDeviceFeatures");

   *pFeatures = (VkPhysicalDeviceFeatures) {
      .robustBufferAccess = false,
      .fullDrawIndexUint32 = false,
      .imageCubeArray = false,
      .independentBlend = false,
      .geometryShader = true,
      .tessellationShader = false,
      .sampleRateShading = false,
      .dualSourceBlend = true,
      .logicOp = true,
      .instancedDrawIndirect = true,
      .depthClip = false,
      .depthBiasClamp = false,
      .fillModeNonSolid = true,
      .depthBounds = false,
      .wideLines = true,
      .largePoints = true,
      .textureCompressionETC2 = true,
      .textureCompressionASTC_LDR = true,
      .textureCompressionBC = true,
      .pipelineStatisticsQuery = true,
      .vertexSideEffects = false,
      .tessellationSideEffects = false,
      .geometrySideEffects = false,
      .fragmentSideEffects = false,
      .shaderTessellationPointSize = false,
      .shaderGeometryPointSize = true,
      .shaderTextureGatherExtended = true,
      .shaderStorageImageExtendedFormats = false,
      .shaderStorageImageMultisample = false,
      .shaderStorageBufferArrayConstantIndexing = false,
      .shaderStorageImageArrayConstantIndexing = false,
      .shaderUniformBufferArrayDynamicIndexing = true,
      .shaderSampledImageArrayDynamicIndexing = false,
      .shaderStorageBufferArrayDynamicIndexing = false,
      .shaderStorageImageArrayDynamicIndexing = false,
      .shaderClipDistance = false,
      .shaderCullDistance = false,
      .shaderFloat64 = false,
      .shaderInt64 = false,
      .shaderFloat16 = false,
      .shaderInt16 = false,
   };

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceLimits(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceLimits*                     pLimits)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
   const struct brw_device_info *devinfo = physical_device->info;

   anv_finishme("Get correct values for PhysicalDeviceLimits");

   *pLimits = (VkPhysicalDeviceLimits) {
      .maxImageDimension1D = (1 << 14),
      .maxImageDimension2D = (1 << 14),
      .maxImageDimension3D = (1 << 10),
      .maxImageDimensionCube = (1 << 14),
      .maxImageArrayLayers = (1 << 10),
      .maxTexelBufferSize = (1 << 14),
      .maxUniformBufferSize = UINT32_MAX,
      .maxStorageBufferSize = UINT32_MAX,
      .maxPushConstantsSize = 128,
      .maxMemoryAllocationCount = UINT32_MAX,
      .bufferImageGranularity = 64, /* A cache line */
      .maxBoundDescriptorSets = MAX_SETS,
      .maxDescriptorSets = UINT32_MAX,
      .maxPerStageDescriptorSamplers = 64,
      .maxPerStageDescriptorUniformBuffers = 64,
      .maxPerStageDescriptorStorageBuffers = 64,
      .maxPerStageDescriptorSampledImages = 64,
      .maxPerStageDescriptorStorageImages = 64,
      .maxDescriptorSetSamplers = 256,
      .maxDescriptorSetUniformBuffers = 256,
      .maxDescriptorSetStorageBuffers = 256,
      .maxDescriptorSetSampledImages = 256,
      .maxDescriptorSetStorageImages = 256,
      .maxVertexInputAttributes = 32,
      .maxVertexInputAttributeOffset = 256,
      .maxVertexInputBindingStride = 256,
      .maxVertexOutputComponents = 32,
      .maxTessGenLevel = 0,
      .maxTessPatchSize = 0,
      .maxTessControlPerVertexInputComponents = 0,
      .maxTessControlPerVertexOutputComponents = 0,
      .maxTessControlPerPatchOutputComponents = 0,
      .maxTessControlTotalOutputComponents = 0,
      .maxTessEvaluationInputComponents = 0,
      .maxTessEvaluationOutputComponents = 0,
      .maxGeometryShaderInvocations = 6,
      .maxGeometryInputComponents = 16,
      .maxGeometryOutputComponents = 16,
      .maxGeometryOutputVertices = 16,
      .maxGeometryTotalOutputComponents = 16,
      .maxFragmentInputComponents = 16,
      .maxFragmentOutputBuffers = 8,
      .maxFragmentDualSourceBuffers = 2,
      .maxFragmentCombinedOutputResources = 8,
      .maxComputeSharedMemorySize = 1024,
      .maxComputeWorkGroupCount = {
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
      },
      .maxComputeWorkGroupInvocations = 16 * devinfo->max_cs_threads,
      .maxComputeWorkGroupSize = {
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
         16 * devinfo->max_cs_threads,
      },
      .subPixelPrecisionBits = 4 /* FIXME */,
      .subTexelPrecisionBits = 4 /* FIXME */,
      .mipmapPrecisionBits = 4 /* FIXME */,
      .maxDrawIndexedIndexValue = UINT32_MAX,
      .maxDrawIndirectInstanceCount = UINT32_MAX,
      .primitiveRestartForPatches = UINT32_MAX,
      .maxSamplerLodBias = 16,
      .maxSamplerAnisotropy = 16,
      .maxViewports = 16,
      .maxDynamicViewportStates = UINT32_MAX,
      .maxViewportDimensions = { (1 << 14), (1 << 14) },
      .viewportBoundsRange = { -1.0, 1.0 }, /* FIXME */
      .viewportSubPixelBits = 13, /* We take a float? */
      .minMemoryMapAlignment = 64, /* A cache line */
      .minTexelBufferOffsetAlignment = 1,
      .minUniformBufferOffsetAlignment = 1,
      .minStorageBufferOffsetAlignment = 1,
      .minTexelOffset = 0, /* FIXME */
      .maxTexelOffset = 0, /* FIXME */
      .minTexelGatherOffset = 0, /* FIXME */
      .maxTexelGatherOffset = 0, /* FIXME */
      .minInterpolationOffset = 0, /* FIXME */
      .maxInterpolationOffset = 0, /* FIXME */
      .subPixelInterpolationOffsetBits = 0, /* FIXME */
      .maxFramebufferWidth = (1 << 14),
      .maxFramebufferHeight = (1 << 14),
      .maxFramebufferLayers = (1 << 10),
      .maxFramebufferColorSamples = 8,
      .maxFramebufferDepthSamples = 8,
      .maxFramebufferStencilSamples = 8,
      .maxColorAttachments = MAX_RTS,
      .maxSampledImageColorSamples = 8,
      .maxSampledImageDepthSamples = 8,
      .maxSampledImageIntegerSamples = 1,
      .maxStorageImageSamples = 1,
      .maxSampleMaskWords = 1,
      .timestampFrequency = 1000 * 1000 * 1000 / 80,
      .maxClipDistances = 0 /* FIXME */,
      .maxCullDistances = 0 /* FIXME */,
      .maxCombinedClipAndCullDistances = 0 /* FIXME */,
      .pointSizeRange = { 0.125, 255.875 },
      .lineWidthRange = { 0.0, 7.9921875 },
      .pointSizeGranularity = (1.0 / 8.0),
      .lineWidthGranularity = (1.0 / 128.0),
   };

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceProperties(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceProperties*                 pProperties)
{
   ANV_FROM_HANDLE(anv_physical_device, pdevice, physicalDevice);

   *pProperties = (VkPhysicalDeviceProperties) {
      .apiVersion = VK_MAKE_VERSION(0, 138, 1),
      .driverVersion = 1,
      .vendorId = 0x8086,
      .deviceId = pdevice->chipset_id,
      .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
   };

   strcpy(pProperties->deviceName, pdevice->name);
   snprintf((char *)pProperties->pipelineCacheUUID, VK_UUID_LENGTH,
            "anv-%s", MESA_GIT_SHA1 + 4);

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceQueueCount(
    VkPhysicalDevice                            physicalDevice,
    uint32_t*                                   pCount)
{
   *pCount = 1;

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceQueueProperties(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    count,
    VkPhysicalDeviceQueueProperties*            pQueueProperties)
{
   assert(count == 1);

   *pQueueProperties = (VkPhysicalDeviceQueueProperties) {
      .queueFlags = VK_QUEUE_GRAPHICS_BIT |
                    VK_QUEUE_COMPUTE_BIT |
                    VK_QUEUE_DMA_BIT,
      .queueCount = 1,
      .supportsTimestamps = true,
   };

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceMemoryProperties*           pMemoryProperties)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);

   size_t aperture_size;
   size_t heap_size;

   if (anv_gem_get_aperture(physical_device, &aperture_size) == -1)
      return vk_error(VK_ERROR_UNAVAILABLE);

   /* Reserve some wiggle room for the driver by exposing only 75% of the
    * aperture to the heap.
    */
   heap_size = 3 * aperture_size / 4;

   /* The property flags below are valid only for llc platforms. */
   pMemoryProperties->memoryTypeCount = 1;
   pMemoryProperties->memoryTypes[0] = (VkMemoryType) {
      .propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
      .heapIndex = 0,
   };

   pMemoryProperties->memoryHeapCount = 1;
   pMemoryProperties->memoryHeaps[0] = (VkMemoryHeap) {
      .size = heap_size,
      .flags = VK_MEMORY_HEAP_HOST_LOCAL,
   };

   return VK_SUCCESS;
}

PFN_vkVoidFunction anv_GetInstanceProcAddr(
    VkInstance                                  instance,
    const char*                                 pName)
{
   return anv_lookup_entrypoint(pName);
}

PFN_vkVoidFunction anv_GetDeviceProcAddr(
    VkDevice                                    device,
    const char*                                 pName)
{
   return anv_lookup_entrypoint(pName);
}

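/* Parse the comma-separated INTEL_DEBUG environment variable. Only two
 * flags are recognized here: "aub" (dump submitted batches to an AUB file)
 * and "no_hw" (skip actual execbuffer submission).
 */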
static void
parse_debug_flags(struct anv_device *device)
{
   const char *debug, *p, *end;

   debug = getenv("INTEL_DEBUG");
   device->dump_aub = false;
   if (debug) {
      for (p = debug; *p; p = end + 1) {
         end = strchrnul(p, ',');
         if (end - p == 3 && memcmp(p, "aub", 3) == 0)
            device->dump_aub = true;
         if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
            device->no_hw = true;
         if (*end == '\0')
            break;
      }
   }
}

static VkResult
anv_queue_init(struct anv_device *device, struct anv_queue *queue)
{
   queue->device = device;
   queue->pool = &device->surface_state_pool;

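   /* completed_serial appears to be a small scratch dword tracking the
    * serial number of the most recently completed command buffer; in the
    * no_hw path it is written directly from anv_QueueSubmit.
    */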
   queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
   if (queue->completed_serial.map == NULL)
      return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *(uint32_t *)queue->completed_serial.map = 0;
   queue->next_serial = 1;

   return VK_SUCCESS;
}

static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   /* This gets torn down with the device so we only need to do this if
    * valgrind is present.
    */
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}

static void
anv_device_init_border_colors(struct anv_device *device)
{
   static const VkClearColorValue border_colors[] = {
      [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 0.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 1.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE] = { .f32 = { 1.0, 1.0, 1.0, 1.0 } },
      [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK] = { .u32 = { 0, 0, 0, 0 } },
      [VK_BORDER_COLOR_INT_OPAQUE_BLACK] = { .u32 = { 0, 0, 0, 1 } },
      [VK_BORDER_COLOR_INT_OPAQUE_WHITE] = { .u32 = { 1, 1, 1, 1 } },
   };

   device->border_colors =
      anv_state_pool_alloc(&device->dynamic_state_pool,
                           sizeof(border_colors), 32);
   memcpy(device->border_colors.map, border_colors, sizeof(border_colors));
}

VkResult anv_CreateDevice(
    VkPhysicalDevice                            physicalDevice,
    const VkDeviceCreateInfo*                   pCreateInfo,
    VkDevice*                                   pDevice)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
   struct anv_instance *instance = physical_device->instance;
   struct anv_device *device;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);

   device = instance->pfnAlloc(instance->pAllocUserData,
                               sizeof(*device), 8,
                               VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!device)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   device->no_hw = physical_device->no_hw;
   parse_debug_flags(device);

   device->instance = physical_device->instance;

   /* XXX(chadv): Can we dup() physicalDevice->fd here? */
   device->fd = open(physical_device->path, O_RDWR | O_CLOEXEC);
   if (device->fd == -1)
      goto fail_device;

   device->context_id = anv_gem_create_context(device);
   if (device->context_id == -1)
      goto fail_fd;

   anv_bo_pool_init(&device->batch_bo_pool, device, ANV_CMD_BUFFER_BATCH_SIZE);

   anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);

   anv_state_pool_init(&device->dynamic_state_pool,
                       &device->dynamic_state_block_pool);

   anv_block_pool_init(&device->instruction_block_pool, device, 2048);
   anv_block_pool_init(&device->surface_state_block_pool, device, 2048);

   anv_state_pool_init(&device->surface_state_pool,
                       &device->surface_state_block_pool);

   anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);

   device->info = *physical_device->info;

   device->compiler = anv_compiler_create(device);
   device->aub_writer = NULL;

   pthread_mutex_init(&device->mutex, NULL);

   anv_queue_init(device, &device->queue);

   anv_device_init_meta(device);

   anv_device_init_border_colors(device);

   *pDevice = anv_device_to_handle(device);

   return VK_SUCCESS;

 fail_fd:
   close(device->fd);
 fail_device:
   anv_device_free(device, device);

   return vk_error(VK_ERROR_UNAVAILABLE);
}

VkResult anv_DestroyDevice(
    VkDevice                                    _device)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   anv_compiler_destroy(device->compiler);

   anv_queue_finish(&device->queue);

   anv_device_finish_meta(device);

#ifdef HAVE_VALGRIND
   /* We only need to free these to prevent valgrind errors. The backing
    * BO will go away in a couple of lines so we don't actually leak.
    */
   anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
#endif

   anv_bo_pool_finish(&device->batch_bo_pool);
   anv_block_pool_finish(&device->dynamic_state_block_pool);
   anv_block_pool_finish(&device->instruction_block_pool);
   anv_block_pool_finish(&device->surface_state_block_pool);
   anv_block_pool_finish(&device->scratch_block_pool);

   close(device->fd);

   if (device->aub_writer)
      anv_aub_writer_destroy(device->aub_writer);

   anv_device_free(device, device);

   return VK_SUCCESS;
}

static const VkExtensionProperties global_extensions[] = {
   {
      .extName = "VK_WSI_LunarG",
      .specVersion = 3
   }
};

VkResult anv_GetGlobalExtensionProperties(
    const char*                                 pLayerName,
    uint32_t*                                   pCount,
    VkExtensionProperties*                      pProperties)
{
   if (pProperties == NULL) {
      *pCount = ARRAY_SIZE(global_extensions);
      return VK_SUCCESS;
   }

   assert(*pCount >= ARRAY_SIZE(global_extensions));

   *pCount = ARRAY_SIZE(global_extensions);
   memcpy(pProperties, global_extensions, sizeof(global_extensions));

   return VK_SUCCESS;
}

VkResult anv_GetPhysicalDeviceExtensionProperties(
    VkPhysicalDevice                            physicalDevice,
    const char*                                 pLayerName,
    uint32_t*                                   pCount,
    VkExtensionProperties*                      pProperties)
{
   if (pProperties == NULL) {
      *pCount = 0;
      return VK_SUCCESS;
   }

   /* None supported at this time */
   return vk_error(VK_ERROR_INVALID_EXTENSION);
}

VkResult anv_GetGlobalLayerProperties(
    uint32_t*                                   pCount,
    VkLayerProperties*                          pProperties)
{
   if (pProperties == NULL) {
      *pCount = 0;
      return VK_SUCCESS;
   }

   /* None supported at this time */
   return vk_error(VK_ERROR_INVALID_LAYER);
}

VkResult anv_GetPhysicalDeviceLayerProperties(
    VkPhysicalDevice                            physicalDevice,
    uint32_t*                                   pCount,
    VkLayerProperties*                          pProperties)
{
   if (pProperties == NULL) {
      *pCount = 0;
      return VK_SUCCESS;
   }

   /* None supported at this time */
   return vk_error(VK_ERROR_INVALID_LAYER);
}

VkResult anv_GetDeviceQueue(
    VkDevice                                    _device,
    uint32_t                                    queueNodeIndex,
    uint32_t                                    queueIndex,
    VkQueue*                                    pQueue)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(queueIndex == 0);

   *pQueue = anv_queue_to_handle(&device->queue);

   return VK_SUCCESS;
}

VkResult anv_QueueSubmit(
    VkQueue                                     _queue,
    uint32_t                                    cmdBufferCount,
    const VkCmdBuffer*                          pCmdBuffers,
    VkFence                                     _fence)
{
   ANV_FROM_HANDLE(anv_queue, queue, _queue);
   ANV_FROM_HANDLE(anv_fence, fence, _fence);
   struct anv_device *device = queue->device;
   int ret;

   for (uint32_t i = 0; i < cmdBufferCount; i++) {
      ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, pCmdBuffers[i]);

      if (device->dump_aub)
         anv_cmd_buffer_dump(cmd_buffer);

      if (!device->no_hw) {
         ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf2.execbuf);
         if (ret != 0)
            return vk_error(VK_ERROR_UNKNOWN);

         if (fence) {
            ret = anv_gem_execbuffer(device, &fence->execbuf);
            if (ret != 0)
               return vk_error(VK_ERROR_UNKNOWN);
         }

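         /* The kernel may have moved our BOs while executing the batch;
          * copy back the offsets it actually chose so later submits can
          * use them as presumed offsets.
          */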
         for (uint32_t i = 0; i < cmd_buffer->execbuf2.bo_count; i++)
            cmd_buffer->execbuf2.bos[i]->offset = cmd_buffer->execbuf2.objects[i].offset;
      } else {
         *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
      }
   }

   return VK_SUCCESS;
}

VkResult anv_QueueWaitIdle(
    VkQueue                                     _queue)
{
   ANV_FROM_HANDLE(anv_queue, queue, _queue);

   return vkDeviceWaitIdle(anv_device_to_handle(queue->device));
}

VkResult anv_DeviceWaitIdle(
    VkDevice                                    _device)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_state state;
   struct anv_batch batch;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   struct anv_bo *bo = NULL;
   VkResult result;
   int64_t timeout;
   int ret;

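   /* There is no dedicated wait-idle ioctl. Instead, submit a trivial batch
    * containing just MI_BATCH_BUFFER_END on our context and block on its BO:
    * batches on a ring execute in order, so once this one retires everything
    * submitted before it has finished as well.
    */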
   state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
   bo = &device->dynamic_state_pool.block_pool->bo;
   batch.start = batch.next = state.map;
   batch.end = state.map + 32;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   exec2_objects[0].handle = bo->gem_handle;
   exec2_objects[0].relocation_count = 0;
   exec2_objects[0].relocs_ptr = 0;
   exec2_objects[0].alignment = 0;
   exec2_objects[0].offset = bo->offset;
   exec2_objects[0].flags = 0;
   exec2_objects[0].rsvd1 = 0;
   exec2_objects[0].rsvd2 = 0;

   execbuf.buffers_ptr = (uintptr_t) exec2_objects;
   execbuf.buffer_count = 1;
   execbuf.batch_start_offset = state.offset;
   execbuf.batch_len = batch.next - state.map;
   execbuf.cliprects_ptr = 0;
   execbuf.num_cliprects = 0;
   execbuf.DR1 = 0;
   execbuf.DR4 = 0;

   execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   execbuf.rsvd1 = device->context_id;
   execbuf.rsvd2 = 0;

   if (!device->no_hw) {
      ret = anv_gem_execbuffer(device, &execbuf);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }

      timeout = INT64_MAX;
      ret = anv_gem_wait(device, bo->gem_handle, &timeout);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }
   }

   anv_state_pool_free(&device->dynamic_state_pool, state);

   return VK_SUCCESS;

 fail:
   anv_state_pool_free(&device->dynamic_state_pool, state);

   return result;
}

void *
anv_device_alloc(struct anv_device *device,
                 size_t size,
                 size_t alignment,
                 VkSystemAllocType allocType)
{
   return device->instance->pfnAlloc(device->instance->pAllocUserData,
                                     size,
                                     alignment,
                                     allocType);
}

void
anv_device_free(struct anv_device *device,
                void *mem)
{
   if (mem == NULL)
      return;

   return device->instance->pfnFree(device->instance->pAllocUserData,
                                    mem);
}

VkResult
anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
{
   bo->gem_handle = anv_gem_create(device, size);
   if (!bo->gem_handle)
      return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);

   bo->map = NULL;
   bo->index = 0;
   bo->offset = 0;
   bo->size = size;

   return VK_SUCCESS;
}

VkResult anv_AllocMemory(
    VkDevice                                    _device,
    const VkMemoryAllocInfo*                    pAllocInfo,
    VkDeviceMemory*                             pMem)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_device_memory *mem;
   VkResult result;

   assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);

   if (pAllocInfo->memoryTypeIndex != 0) {
      /* We support exactly one memory type. */
      return vk_error(VK_ERROR_INVALID_VALUE);
   }

   /* FINISHME: Fail if allocation request exceeds heap size. */

   mem = anv_device_alloc(device, sizeof(*mem), 8,
                          VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (mem == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
   if (result != VK_SUCCESS)
      goto fail;

   *pMem = anv_device_memory_to_handle(mem);

   return VK_SUCCESS;

 fail:
   anv_device_free(device, mem);

   return result;
}

VkResult anv_FreeMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);

   if (mem->bo.map)
      anv_gem_munmap(mem->bo.map, mem->bo.size);

   if (mem->bo.gem_handle != 0)
      anv_gem_close(device, mem->bo.gem_handle);

   anv_device_free(device, mem);

   return VK_SUCCESS;
}

VkResult anv_MapMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem,
    VkDeviceSize                                offset,
    VkDeviceSize                                size,
    VkMemoryMapFlags                            flags,
    void**                                      ppData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);

   /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
    * takes a VkDeviceMemory pointer, it seems like only one map of the memory
    * at a time is valid. We could just mmap up front and return an offset
    * pointer here, but that may exhaust virtual memory on 32 bit
    * userspace. */

   mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
   mem->map_size = size;

   *ppData = mem->map;

   return VK_SUCCESS;
}

VkResult anv_UnmapMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);

   anv_gem_munmap(mem->map, mem->map_size);

   return VK_SUCCESS;
}

VkResult anv_FlushMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   /* clflush here for !llc platforms */

   return VK_SUCCESS;
}

VkResult anv_InvalidateMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   return anv_FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
}

VkResult anv_GetBufferMemoryRequirements(
    VkDevice                                    device,
    VkBuffer                                    _buffer,
    VkMemoryRequirements*                       pMemoryRequirements)
{
   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

   /* The Vulkan spec (git aaed022) says:
    *
    *    memoryTypeBits is a bitfield and contains one bit set for every
    *    supported memory type for the resource. The bit `1<<i` is set if and
    *    only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
    *    structure for the physical device is supported.
    *
    * We support exactly one memory type.
    */
   pMemoryRequirements->memoryTypeBits = 1;

   pMemoryRequirements->size = buffer->size;
   pMemoryRequirements->alignment = 16;

   return VK_SUCCESS;
}

VkResult anv_GetImageMemoryRequirements(
    VkDevice                                    device,
    VkImage                                     _image,
    VkMemoryRequirements*                       pMemoryRequirements)
{
   ANV_FROM_HANDLE(anv_image, image, _image);

   /* The Vulkan spec (git aaed022) says:
    *
    *    memoryTypeBits is a bitfield and contains one bit set for every
    *    supported memory type for the resource. The bit `1<<i` is set if and
    *    only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
    *    structure for the physical device is supported.
    *
    * We support exactly one memory type.
    */
   pMemoryRequirements->memoryTypeBits = 1;

   pMemoryRequirements->size = image->size;
   pMemoryRequirements->alignment = image->alignment;

   return VK_SUCCESS;
}

VkResult anv_GetImageSparseMemoryRequirements(
    VkDevice                                    device,
    VkImage                                     image,
    uint32_t*                                   pNumRequirements,
    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
{
   return vk_error(VK_UNSUPPORTED);
}

VkResult anv_GetDeviceMemoryCommitment(
    VkDevice                                    device,
    VkDeviceMemory                              memory,
    VkDeviceSize*                               pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
   stub_return(VK_SUCCESS);
}

VkResult anv_BindBufferMemory(
    VkDevice                                    device,
    VkBuffer                                    _buffer,
    VkDeviceMemory                              _mem,
    VkDeviceSize                                memOffset)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

   buffer->bo = &mem->bo;
   buffer->offset = memOffset;

   return VK_SUCCESS;
}

VkResult anv_BindImageMemory(
    VkDevice                                    device,
    VkImage                                     _image,
    VkDeviceMemory                              _mem,
    VkDeviceSize                                memOffset)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
   ANV_FROM_HANDLE(anv_image, image, _image);

   image->bo = &mem->bo;
   image->offset = memOffset;

   return VK_SUCCESS;
}

VkResult anv_QueueBindSparseBufferMemory(
    VkQueue                                     queue,
    VkBuffer                                    buffer,
    uint32_t                                    numBindings,
    const VkSparseMemoryBindInfo*               pBindInfo)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueBindSparseImageOpaqueMemory(
    VkQueue                                     queue,
    VkImage                                     image,
    uint32_t                                    numBindings,
    const VkSparseMemoryBindInfo*               pBindInfo)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueBindSparseImageMemory(
    VkQueue                                     queue,
    VkImage                                     image,
    uint32_t                                    numBindings,
    const VkSparseImageMemoryBindInfo*          pBindInfo)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_CreateFence(
    VkDevice                                    _device,
    const VkFenceCreateInfo*                    pCreateInfo,
    VkFence*                                    pFence)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_fence *fence;
   struct anv_batch batch;
   VkResult result;

   const uint32_t fence_size = 128;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);

   fence = anv_device_alloc(device, sizeof(*fence), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (fence == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   result = anv_bo_init_new(&fence->bo, device, fence_size);
   if (result != VK_SUCCESS)
      goto fail;

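   /* A fence is a tiny batch of its own, containing only
    * MI_BATCH_BUFFER_END. anv_QueueSubmit submits it after the real work,
    * so waiting on this BO (see anv_GetFenceStatus) tells us when everything
    * ahead of it on the ring has executed.
    */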
   fence->bo.map =
      anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
   batch.next = batch.start = fence->bo.map;
   batch.end = fence->bo.map + fence->bo.size;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   fence->exec2_objects[0].handle = fence->bo.gem_handle;
   fence->exec2_objects[0].relocation_count = 0;
   fence->exec2_objects[0].relocs_ptr = 0;
   fence->exec2_objects[0].alignment = 0;
   fence->exec2_objects[0].offset = fence->bo.offset;
   fence->exec2_objects[0].flags = 0;
   fence->exec2_objects[0].rsvd1 = 0;
   fence->exec2_objects[0].rsvd2 = 0;

   fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
   fence->execbuf.buffer_count = 1;
   fence->execbuf.batch_start_offset = 0;
   fence->execbuf.batch_len = batch.next - fence->bo.map;
   fence->execbuf.cliprects_ptr = 0;
   fence->execbuf.num_cliprects = 0;
   fence->execbuf.DR1 = 0;
   fence->execbuf.DR4 = 0;

   fence->execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   fence->execbuf.rsvd1 = device->context_id;
   fence->execbuf.rsvd2 = 0;

   *pFence = anv_fence_to_handle(fence);

   return VK_SUCCESS;

 fail:
   anv_device_free(device, fence);

   return result;
}

VkResult anv_DestroyFence(
    VkDevice                                    _device,
    VkFence                                     _fence)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_fence, fence, _fence);

   anv_gem_munmap(fence->bo.map, fence->bo.size);
   anv_gem_close(device, fence->bo.gem_handle);
   anv_device_free(device, fence);

   return VK_SUCCESS;
}

VkResult anv_ResetFences(
    VkDevice                                    _device,
    uint32_t                                    fenceCount,
    const VkFence*                              pFences)
{
   for (uint32_t i = 0; i < fenceCount; i++) {
      ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
      fence->ready = false;
   }

   return VK_SUCCESS;
}

VkResult anv_GetFenceStatus(
    VkDevice                                    _device,
    VkFence                                     _fence)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_fence, fence, _fence);
   int64_t t = 0;
   int ret;

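   /* A zero timeout effectively turns the gem_wait into a poll: it succeeds
    * immediately if the fence BO is already idle and fails with ETIME
    * otherwise.
    */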
   if (fence->ready)
      return VK_SUCCESS;

   ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
   if (ret == 0) {
      fence->ready = true;
      return VK_SUCCESS;
   }

   return VK_NOT_READY;
}

VkResult anv_WaitForFences(
    VkDevice                                    _device,
    uint32_t                                    fenceCount,
    const VkFence*                              pFences,
    VkBool32                                    waitAll,
    uint64_t                                    timeout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   int64_t t = timeout;
   int ret;

   /* FIXME: handle !waitAll */

   for (uint32_t i = 0; i < fenceCount; i++) {
      ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
      ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
      if (ret == -1 && errno == ETIME)
         return VK_TIMEOUT;
      else if (ret == -1)
         return vk_error(VK_ERROR_UNKNOWN);
   }

   return VK_SUCCESS;
}

// Queue semaphore functions

VkResult anv_CreateSemaphore(
    VkDevice                                    device,
    const VkSemaphoreCreateInfo*                pCreateInfo,
    VkSemaphore*                                pSemaphore)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_DestroySemaphore(
    VkDevice                                    device,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueSignalSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_QueueWaitSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}

// Event functions

VkResult anv_CreateEvent(
    VkDevice                                    device,
    const VkEventCreateInfo*                    pCreateInfo,
    VkEvent*                                    pEvent)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_DestroyEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_GetEventStatus(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_SetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

VkResult anv_ResetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}

// Buffer functions

VkResult anv_CreateBuffer(
    VkDevice                                    _device,
    const VkBufferCreateInfo*                   pCreateInfo,
    VkBuffer*                                   pBuffer)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_buffer *buffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);

   buffer = anv_device_alloc(device, sizeof(*buffer), 8,
                             VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (buffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   buffer->size = pCreateInfo->size;
   buffer->bo = NULL;
   buffer->offset = 0;

   *pBuffer = anv_buffer_to_handle(buffer);

   return VK_SUCCESS;
}

VkResult anv_DestroyBuffer(
    VkDevice                                    _device,
    VkBuffer                                    _buffer)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

   anv_device_free(device, buffer);

   return VK_SUCCESS;
}

// Buffer view functions

void
anv_fill_buffer_surface_state(void *state, VkFormat format,
                              uint32_t offset, uint32_t range)
{
   const struct anv_format *info;

   info = anv_format_for_vk_format(format);
   /* FIXME: hard-coded 4-byte element stride. An actual RGBA float texel
    * would be 16 bytes; the stride should be derived from the format. */
   uint32_t stride = 4;
   uint32_t num_elements = range / stride;

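   /* For SURFTYPE_BUFFER the Width/Height/Depth fields together hold a
    * single element count: bits 6:0 go in Width, bits 20:7 in Height and
    * the high bits in Depth.
    */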
   struct GEN8_RENDER_SURFACE_STATE surface_state = {
      .SurfaceType = SURFTYPE_BUFFER,
      .SurfaceArray = false,
      .SurfaceFormat = info->surface_format,
      .SurfaceVerticalAlignment = VALIGN4,
      .SurfaceHorizontalAlignment = HALIGN4,
      .TileMode = LINEAR,
      .VerticalLineStride = 0,
      .VerticalLineStrideOffset = 0,
      .SamplerL2BypassModeDisable = true,
      .RenderCacheReadWriteMode = WriteOnlyCache,
      .MemoryObjectControlState = GEN8_MOCS,
      .BaseMipLevel = 0.0,
      .SurfaceQPitch = 0,
      .Height = (num_elements >> 7) & 0x3fff,
      .Width = num_elements & 0x7f,
      .Depth = (num_elements >> 21) & 0x3f,
      .SurfacePitch = stride - 1,
      .MinimumArrayElement = 0,
      .NumberofMultisamples = MULTISAMPLECOUNT_1,
      .XOffset = 0,
      .YOffset = 0,
      .SurfaceMinLOD = 0,
      .MIPCountLOD = 0,
      .AuxiliarySurfaceMode = AUX_NONE,
      .RedClearColor = 0,
      .GreenClearColor = 0,
      .BlueClearColor = 0,
      .AlphaClearColor = 0,
      .ShaderChannelSelectRed = SCS_RED,
      .ShaderChannelSelectGreen = SCS_GREEN,
      .ShaderChannelSelectBlue = SCS_BLUE,
      .ShaderChannelSelectAlpha = SCS_ALPHA,
      .ResourceMinLOD = 0.0,
      /* FIXME: We assume that the image must be bound at this time. */
      .SurfaceBaseAddress = { NULL, offset },
   };

   GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
}

VkResult anv_CreateBufferView(
    VkDevice                                    _device,
    const VkBufferViewCreateInfo*               pCreateInfo,
    VkBufferView*                               pView)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer, buffer, pCreateInfo->buffer);
   struct anv_buffer_view *bview;
   struct anv_surface_view *view;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);

   bview = anv_device_alloc(device, sizeof(*bview), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (bview == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   view = &bview->view;
   view->bo = buffer->bo;
   view->offset = buffer->offset + pCreateInfo->offset;
   view->surface_state =
      anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
   view->format = pCreateInfo->format;
   view->range = pCreateInfo->range;

   anv_fill_buffer_surface_state(view->surface_state.map,
                                 pCreateInfo->format,
                                 view->offset, pCreateInfo->range);

   *pView = anv_buffer_view_to_handle(bview);

   return VK_SUCCESS;
}

VkResult anv_DestroyBufferView(
    VkDevice                                    _device,
    VkBufferView                                _bview)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer_view, bview, _bview);

   anv_surface_view_fini(device, &bview->view);
   anv_device_free(device, bview);

   return VK_SUCCESS;
}

// Sampler functions

VkResult anv_CreateSampler(
    VkDevice                                    _device,
    const VkSamplerCreateInfo*                  pCreateInfo,
    VkSampler*                                  pSampler)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_sampler *sampler;
   uint32_t mag_filter, min_filter, max_anisotropy;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = anv_device_alloc(device, sizeof(*sampler), 8,
                              VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!sampler)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   static const uint32_t vk_to_gen_tex_filter[] = {
      [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
      [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_mipmap_mode[] = {
      [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
      [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
      [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_tex_address[] = {
      [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
      [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
      [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
      [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
      [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
   };

   static const uint32_t vk_to_gen_compare_op[] = {
      [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
      [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
      [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
      [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
      [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
      [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
      [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
      [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
   };

   if (pCreateInfo->maxAnisotropy > 1) {
      mag_filter = MAPFILTER_ANISOTROPIC;
      min_filter = MAPFILTER_ANISOTROPIC;
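      /* Judging by the math below, the MaximumAnisotropy field encodes the
       * ratio in steps of two: 0 means 2:1, 1 means 4:1 and so on.
       */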
      max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
   } else {
      mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
      min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
      max_anisotropy = RATIO21;
   }

   struct GEN8_SAMPLER_STATE sampler_state = {
      .SamplerDisable = false,
      .TextureBorderColorMode = DX10OGL,
      .LODPreClampMode = 0,
      .BaseMipLevel = 0.0,
      .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
      .MagModeFilter = mag_filter,
      .MinModeFilter = min_filter,
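      /* The multiply by 256 suggests TextureLODBias is a signed fixed-point
       * field with 8 fractional bits.
       */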
      .TextureLODBias = pCreateInfo->mipLodBias * 256,
      .AnisotropicAlgorithm = EWAApproximation,
      .MinLOD = pCreateInfo->minLod,
      .MaxLOD = pCreateInfo->maxLod,
      .ChromaKeyEnable = 0,
      .ChromaKeyIndex = 0,
      .ChromaKeyMode = 0,
      .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
      .CubeSurfaceControlMode = 0,

      .IndirectStatePointer =
         device->border_colors.offset +
         pCreateInfo->borderColor * sizeof(float) * 4,

      .LODClampMagnificationMode = MIPNONE,
      .MaximumAnisotropy = max_anisotropy,
      .RAddressMinFilterRoundingEnable = 0,
      .RAddressMagFilterRoundingEnable = 0,
      .VAddressMinFilterRoundingEnable = 0,
      .VAddressMagFilterRoundingEnable = 0,
      .UAddressMinFilterRoundingEnable = 0,
      .UAddressMagFilterRoundingEnable = 0,
      .TrilinearFilterQuality = 0,
      .NonnormalizedCoordinateEnable = 0,
      .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
      .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
      .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
   };

   GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);

   *pSampler = anv_sampler_to_handle(sampler);

   return VK_SUCCESS;
}

VkResult anv_DestroySampler(
    VkDevice                                    _device,
    VkSampler                                   _sampler)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_sampler, sampler, _sampler);

   anv_device_free(device, sampler);

   return VK_SUCCESS;
}

// Descriptor set functions

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t num_dynamic_buffers = 0;
   uint32_t count = 0;
   uint32_t stages = 0;
   uint32_t s;

   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            surface_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         num_dynamic_buffers += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      stages |= pCreateInfo->pBinding[i].stageFlags;
      count += pCreateInfo->pBinding[i].arraySize;
   }

   uint32_t sampler_total = 0;
   uint32_t surface_total = 0;
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      sampler_total += sampler_count[s];
      surface_total += surface_count[s];
   }

   size_t size = sizeof(*set_layout) +
      (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
   set_layout = anv_device_alloc(device, size, 8,
                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->num_dynamic_buffers = num_dynamic_buffers;
   set_layout->count = count;
   set_layout->shader_stages = stages;

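   /* Carve the trailing entries[] array up into per-stage runs: for each
    * stage, a run of surface slots followed by a run of sampler slots.
    */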
   struct anv_descriptor_slot *p = set_layout->entries;
   struct anv_descriptor_slot *sampler[VK_SHADER_STAGE_NUM];
   struct anv_descriptor_slot *surface[VK_SHADER_STAGE_NUM];
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      set_layout->stage[s].surface_count = surface_count[s];
      set_layout->stage[s].surface_start = surface[s] = p;
      p += surface_count[s];
      set_layout->stage[s].sampler_count = sampler_count[s];
      set_layout->stage[s].sampler_start = sampler[s] = p;
      p += sampler_count[s];
   }

   uint32_t descriptor = 0;
   int8_t dynamic_slot = 0;
   bool is_dynamic;
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               sampler[s]->index = descriptor + j;
               sampler[s]->dynamic_slot = -1;
               sampler[s]++;
            }
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         is_dynamic = true;
         break;
      default:
         is_dynamic = false;
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               surface[s]->index = descriptor + j;
               if (is_dynamic)
                  surface[s]->dynamic_slot = dynamic_slot + j;
               else
                  surface[s]->dynamic_slot = -1;
               surface[s]++;
            }
         break;
      default:
         break;
      }

      if (is_dynamic)
         dynamic_slot += pCreateInfo->pBinding[i].arraySize;

      descriptor += pCreateInfo->pBinding[i].arraySize;
   }

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VkResult anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   anv_device_free(device, set_layout);

   return VK_SUCCESS;
}

VkResult anv_CreateDescriptorPool(
    VkDevice                                    device,
    VkDescriptorPoolUsage                       poolUsage,
    uint32_t                                    maxSets,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    VkDescriptorPool*                           pDescriptorPool)
{
   anv_finishme("VkDescriptorPool is a stub");
   pDescriptorPool->handle = 1;
   return VK_SUCCESS;
}

VkResult anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool)
{
   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
   return VK_SUCCESS;
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    device,
    VkDescriptorPool                            descriptorPool)
{
   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
   return VK_SUCCESS;
}

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   size_t size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);

   set = anv_device_alloc(device, size, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* A descriptor set may not be 100% filled. Clear the set so we can
    * later detect holes in it.
    */
   memset(set, 0, size);

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_set *set)
{
   anv_device_free(device, set);
}

VkResult anv_AllocDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorSetUsage                        setUsage,
    uint32_t                                    count,
    const VkDescriptorSetLayout*                pSetLayouts,
    VkDescriptorSet*                            pDescriptorSets,
    uint32_t*                                   pCount)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   VkResult result;
   struct anv_descriptor_set *set;

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout, pSetLayouts[i]);

      result = anv_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS) {
         *pCount = i;
         return result;
      }

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   *pCount = count;

   return VK_SUCCESS;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      anv_descriptor_set_destroy(device, set);
   }

   return VK_SUCCESS;
}

VkResult anv_UpdateDescriptorSets(
    VkDevice                                    device,
    uint32_t                                    writeCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    copyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   for (uint32_t i = 0; i < writeCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->destSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->count; j++) {
            set->descriptors[write->destBinding + j].sampler =
               anv_sampler_from_handle(write->pDescriptors[j].sampler);
         }

         if (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
            break;

         /* fallthrough */

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         for (uint32_t j = 0; j < write->count; j++) {
            ANV_FROM_HANDLE(anv_image_view, iview,
                            write->pDescriptors[j].imageView);
            set->descriptors[write->destBinding + j].view = &iview->view;
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_finishme("texel buffers not implemented");
         break;

      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_finishme("input attachments not implemented");
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->count; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pDescriptors[j].bufferView);
            set->descriptors[write->destBinding + j].view = &bview->view;
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < copyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
1899 ANV_FROM_HANDLE(anv_descriptor_set, dest, copy->destSet);
1900 for (uint32_t j = 0; j < copy->count; j++) {
1901 dest->descriptors[copy->destBinding + j] =
1902 src->descriptors[copy->srcBinding + j];
1903 }
1904 }
1905
1906 return VK_SUCCESS;
1907 }
1908
1909 // State object functions
1910
1911 static inline int64_t
1912 clamp_int64(int64_t x, int64_t min, int64_t max)
1913 {
1914 if (x < min)
1915 return min;
1916 else if (x < max)
1917 return x;
1918 else
1919 return max;
1920 }
1921
1922 VkResult anv_CreateDynamicViewportState(
1923 VkDevice _device,
1924 const VkDynamicViewportStateCreateInfo* pCreateInfo,
1925 VkDynamicViewportState* pState)
1926 {
1927 ANV_FROM_HANDLE(anv_device, device, _device);
1928 struct anv_dynamic_vp_state *state;
1929
1930 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO);
1931
1932 state = anv_device_alloc(device, sizeof(*state), 8,
1933 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1934 if (state == NULL)
1935 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1936
1937 unsigned count = pCreateInfo->viewportAndScissorCount;
1938 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
1939 count * 64, 64);
1940 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
1941 count * 8, 32);
1942 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
1943 count * 32, 32);
1944
1945 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
1946 const VkViewport *vp = &pCreateInfo->pViewports[i];
1947 const VkRect2D *s = &pCreateInfo->pScissors[i];
1948
1949 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
1950 .ViewportMatrixElementm00 = vp->width / 2,
1951 .ViewportMatrixElementm11 = vp->height / 2,
1952 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
1953 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
1954 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
1955 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
1956 .XMinClipGuardband = -1.0f,
1957 .XMaxClipGuardband = 1.0f,
1958 .YMinClipGuardband = -1.0f,
1959 .YMaxClipGuardband = 1.0f,
1960 .XMinViewPort = vp->originX,
1961 .XMaxViewPort = vp->originX + vp->width - 1,
1962 .YMinViewPort = vp->originY,
1963 .YMaxViewPort = vp->originY + vp->height - 1,
1964 };
1965
1966 struct GEN8_CC_VIEWPORT cc_viewport = {
1967 .MinimumDepth = vp->minDepth,
1968 .MaximumDepth = vp->maxDepth
1969 };
1970
1971       /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1972        * ymax < ymin for empty clips.  In case clip x, y, width, and height
1973        * are all 0, the clamps below produce 0 for xmin, ymin, xmax, and ymax,
1974        * which isn't what we want.  Just special case empty clips and produce
1975        * a canonical empty clip. */
1976 static const struct GEN8_SCISSOR_RECT empty_scissor = {
1977 .ScissorRectangleYMin = 1,
1978 .ScissorRectangleXMin = 1,
1979 .ScissorRectangleYMax = 0,
1980 .ScissorRectangleXMax = 0
1981 };
1982
1983 const int max = 0xffff;
1984 struct GEN8_SCISSOR_RECT scissor = {
1985 /* Do this math using int64_t so overflow gets clamped correctly. */
1986 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
1987 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
1988 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
1989 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
1990 };
1991
1992 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
1993       GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 8, &cc_viewport);
1994
1995 if (s->extent.width <= 0 || s->extent.height <= 0) {
1996 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
1997 } else {
1998 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
1999 }
2000 }
2001
2002 *pState = anv_dynamic_vp_state_to_handle(state);
2003
2004 return VK_SUCCESS;
2005 }
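
/* Worked example of the viewport matrix packed above, for originX = 0,
 * originY = 0, width = 1920, height = 1080, minDepth = 0, maxDepth = 1:
 *
 *    m00 = 960, m11 = 540, m22 = 0.5, m30 = 960, m31 = 540, m32 = 0.5
 *
 * so NDC x = -1 maps to 960 * -1 + 960 = 0 and NDC x = +1 maps to
 * 960 * 1 + 960 = 1920: the usual scale-plus-translate from [-1, 1] clip
 * space into window pixels, with z in [-1, 1] mapped to
 * [minDepth, maxDepth].
 */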
2006
2007 VkResult anv_DestroyDynamicViewportState(
2008 VkDevice _device,
2009 VkDynamicViewportState _vp_state)
2010 {
2011 ANV_FROM_HANDLE(anv_device, device, _device);
2012 ANV_FROM_HANDLE(anv_dynamic_vp_state, vp_state, _vp_state);
2013
2014 anv_state_pool_free(&device->dynamic_state_pool, vp_state->sf_clip_vp);
2015 anv_state_pool_free(&device->dynamic_state_pool, vp_state->cc_vp);
2016 anv_state_pool_free(&device->dynamic_state_pool, vp_state->scissor);
2017
2018 anv_device_free(device, vp_state);
2019
2020 return VK_SUCCESS;
2021 }
2022
2023 VkResult anv_CreateDynamicRasterState(
2024 VkDevice _device,
2025 const VkDynamicRasterStateCreateInfo* pCreateInfo,
2026 VkDynamicRasterState* pState)
2027 {
2028 ANV_FROM_HANDLE(anv_device, device, _device);
2029 struct anv_dynamic_rs_state *state;
2030
2031 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RASTER_STATE_CREATE_INFO);
2032
2033 state = anv_device_alloc(device, sizeof(*state), 8,
2034 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2035 if (state == NULL)
2036 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2037
2038 struct GEN8_3DSTATE_SF sf = {
2039 GEN8_3DSTATE_SF_header,
2040 .LineWidth = pCreateInfo->lineWidth,
2041 };
2042
2043 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
2044
2045 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
2046 pCreateInfo->slopeScaledDepthBias != 0.0f;
2047 struct GEN8_3DSTATE_RASTER raster = {
2048 .GlobalDepthOffsetEnableSolid = enable_bias,
2049 .GlobalDepthOffsetEnableWireframe = enable_bias,
2050 .GlobalDepthOffsetEnablePoint = enable_bias,
2051 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
2052 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
2053 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
2054 };
2055
2056 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
2057
2058 *pState = anv_dynamic_rs_state_to_handle(state);
2059
2060 return VK_SUCCESS;
2061 }
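
/* Note on the depth-offset fields above: the hardware applies roughly the
 * OpenGL polygon-offset formula,
 *
 *    offset = GlobalDepthOffsetConstant * r + GlobalDepthOffsetScale * m
 *
 * where r is the minimum resolvable depth difference and m is the maximum
 * depth slope of the primitive, with GlobalDepthOffsetClamp bounding the
 * result.  The enable bits are set only when either bias parameter is
 * non-zero, so a zeroed VkDynamicRasterStateCreateInfo leaves depth offset
 * disabled.
 */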
2062
2063 VkResult anv_DestroyDynamicRasterState(
2064 VkDevice _device,
2065 VkDynamicRasterState _rs_state)
2066 {
2067 ANV_FROM_HANDLE(anv_device, device, _device);
2068 ANV_FROM_HANDLE(anv_dynamic_rs_state, rs_state, _rs_state);
2069
2070 anv_device_free(device, rs_state);
2071
2072 return VK_SUCCESS;
2073 }
2074
2075 VkResult anv_CreateDynamicColorBlendState(
2076 VkDevice _device,
2077 const VkDynamicColorBlendStateCreateInfo* pCreateInfo,
2078 VkDynamicColorBlendState* pState)
2079 {
2080 ANV_FROM_HANDLE(anv_device, device, _device);
2081 struct anv_dynamic_cb_state *state;
2082
2083 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_COLOR_BLEND_STATE_CREATE_INFO);
2084
2085 state = anv_device_alloc(device, sizeof(*state), 8,
2086 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2087 if (state == NULL)
2088 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2089
2090 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2091 .BlendConstantColorRed = pCreateInfo->blendConst[0],
2092 .BlendConstantColorGreen = pCreateInfo->blendConst[1],
2093 .BlendConstantColorBlue = pCreateInfo->blendConst[2],
2094 .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
2095 };
2096
2097 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2098
2099 *pState = anv_dynamic_cb_state_to_handle(state);
2100
2101 return VK_SUCCESS;
2102 }
2103
2104 VkResult anv_DestroyDynamicColorBlendState(
2105 VkDevice _device,
2106 VkDynamicColorBlendState _cb_state)
2107 {
2108 ANV_FROM_HANDLE(anv_device, device, _device);
2109 ANV_FROM_HANDLE(anv_dynamic_cb_state, cb_state, _cb_state);
2110
2111 anv_device_free(device, cb_state);
2112
2113 return VK_SUCCESS;
2114 }
2115
2116 VkResult anv_CreateDynamicDepthStencilState(
2117 VkDevice _device,
2118 const VkDynamicDepthStencilStateCreateInfo* pCreateInfo,
2119 VkDynamicDepthStencilState* pState)
2120 {
2121 ANV_FROM_HANDLE(anv_device, device, _device);
2122 struct anv_dynamic_ds_state *state;
2123
2124 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DEPTH_STENCIL_STATE_CREATE_INFO);
2125
2126 state = anv_device_alloc(device, sizeof(*state), 8,
2127 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2128 if (state == NULL)
2129 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2130
2131 struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
2132 GEN8_3DSTATE_WM_DEPTH_STENCIL_header,
2133
2134       /* FIXME: Is deriving write-enable from a non-zero write mask right? */
2135 .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,
2136
2137 .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2138 .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
2139
2140 .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2141 .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
2142 };
2143
2144 GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
2145 &wm_depth_stencil);
2146
2147 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2148 .StencilReferenceValue = pCreateInfo->stencilFrontRef,
2149 .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
2150 };
2151
2152 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2153
2154 *pState = anv_dynamic_ds_state_to_handle(state);
2155
2156 return VK_SUCCESS;
2157 }
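
/* Usage sketch (illustrative only): the masks are truncated to 8 bits above
 * because GEN stencil buffers hold 8 bits per pixel.  A create info that
 * tests and writes only the low nibble might look like:
 *
 *    VkDynamicDepthStencilStateCreateInfo ds_info = {
 *       .sType = VK_STRUCTURE_TYPE_DYNAMIC_DEPTH_STENCIL_STATE_CREATE_INFO,
 *       .stencilReadMask  = 0x0f,
 *       .stencilWriteMask = 0x0f,  // non-zero, so stencil writes are enabled
 *       .stencilFrontRef  = 1,
 *       .stencilBackRef   = 1,
 *    };
 */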
2158
2159 VkResult anv_DestroyDynamicDepthStencilState(
2160 VkDevice _device,
2161 VkDynamicDepthStencilState _ds_state)
2162 {
2163 ANV_FROM_HANDLE(anv_device, device, _device);
2164 ANV_FROM_HANDLE(anv_dynamic_ds_state, ds_state, _ds_state);
2165
2166 anv_device_free(device, ds_state);
2167
2168 return VK_SUCCESS;
2169 }
2170
2171 // Command buffer functions
2172
2173 VkResult anv_CreateCommandPool(
2174 VkDevice device,
2175 const VkCmdPoolCreateInfo* pCreateInfo,
2176 VkCmdPool* pCmdPool)
2177 {
2178    pCmdPool->handle = 7; /* dummy non-zero handle; command pools are still stubbed */
2179
2180 stub_return(VK_SUCCESS);
2181 }
2182
2183 VkResult anv_DestroyCommandPool(
2184 VkDevice device,
2185 VkCmdPool cmdPool)
2186 {
2187 stub_return(VK_SUCCESS);
2188 }
2189
2190 VkResult anv_ResetCommandPool(
2191 VkDevice device,
2192 VkCmdPool cmdPool,
2193 VkCmdPoolResetFlags flags)
2194 {
2195 stub_return(VK_UNSUPPORTED);
2196 }
2197
2198 VkResult anv_CreateFramebuffer(
2199 VkDevice _device,
2200 const VkFramebufferCreateInfo* pCreateInfo,
2201 VkFramebuffer* pFramebuffer)
2202 {
2203 ANV_FROM_HANDLE(anv_device, device, _device);
2204 struct anv_framebuffer *framebuffer;
2205
2206 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
2207
2208 size_t size = sizeof(*framebuffer) +
2209 sizeof(struct anv_attachment_view *) * pCreateInfo->attachmentCount;
2210 framebuffer = anv_device_alloc(device, size, 8,
2211 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2212 if (framebuffer == NULL)
2213 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2214
2215 framebuffer->attachment_count = pCreateInfo->attachmentCount;
2216 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2217 ANV_FROM_HANDLE(anv_attachment_view, view,
2218 pCreateInfo->pAttachments[i].view);
2219
2220 framebuffer->attachments[i] = view;
2221 }
2222
2223 framebuffer->width = pCreateInfo->width;
2224 framebuffer->height = pCreateInfo->height;
2225 framebuffer->layers = pCreateInfo->layers;
2226
2227 anv_CreateDynamicViewportState(anv_device_to_handle(device),
2228 &(VkDynamicViewportStateCreateInfo) {
2229 .sType = VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO,
2230 .viewportAndScissorCount = 1,
2231 .pViewports = (VkViewport[]) {
2232 {
2233 .originX = 0,
2234 .originY = 0,
2235 .width = pCreateInfo->width,
2236 .height = pCreateInfo->height,
2237 .minDepth = 0,
2238 .maxDepth = 1
2239 },
2240 },
2241 .pScissors = (VkRect2D[]) {
2242 { { 0, 0 },
2243 { pCreateInfo->width, pCreateInfo->height } },
2244 }
2245 },
2246 &framebuffer->vp_state);
2247
2248 *pFramebuffer = anv_framebuffer_to_handle(framebuffer);
2249
2250 return VK_SUCCESS;
2251 }
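
/* Note: anv_CreateFramebuffer also bakes a default viewport/scissor state
 * into the framebuffer (vp_state): a single viewport covering the full
 * width x height with depth range [0, 1] and a matching full-extent
 * scissor.  anv_DestroyFramebuffer below frees it again, so this implicit
 * state is never managed by the caller.
 */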
2252
2253 VkResult anv_DestroyFramebuffer(
2254 VkDevice _device,
2255 VkFramebuffer _fb)
2256 {
2257 ANV_FROM_HANDLE(anv_device, device, _device);
2258 ANV_FROM_HANDLE(anv_framebuffer, fb, _fb);
2259
2260 anv_DestroyDynamicViewportState(anv_device_to_handle(device),
2261 fb->vp_state);
2262 anv_device_free(device, fb);
2263
2264 return VK_SUCCESS;
2265 }
2266
2267 VkResult anv_CreateRenderPass(
2268 VkDevice _device,
2269 const VkRenderPassCreateInfo* pCreateInfo,
2270 VkRenderPass* pRenderPass)
2271 {
2272 ANV_FROM_HANDLE(anv_device, device, _device);
2273 struct anv_render_pass *pass;
2274 size_t size;
2275
2276 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
2277
2278 size = sizeof(*pass) +
2279 pCreateInfo->subpassCount * sizeof(struct anv_subpass);
2280 pass = anv_device_alloc(device, size, 8,
2281 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2282 if (pass == NULL)
2283 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2284
2285    /* Clear the subpasses along with the parent pass. This is required because
2286 * each array member of anv_subpass must be a valid pointer if not NULL.
2287 */
2288 memset(pass, 0, size);
2289
2290 pass->attachment_count = pCreateInfo->attachmentCount;
2291 pass->subpass_count = pCreateInfo->subpassCount;
2292
2293 size = pCreateInfo->attachmentCount * sizeof(*pass->attachments);
2294 pass->attachments = anv_device_alloc(device, size, 8,
2295 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2296 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2297 pass->attachments[i].format = pCreateInfo->pAttachments[i].format;
2298 pass->attachments[i].samples = pCreateInfo->pAttachments[i].samples;
2299 pass->attachments[i].load_op = pCreateInfo->pAttachments[i].loadOp;
2300 pass->attachments[i].stencil_load_op = pCreateInfo->pAttachments[i].stencilLoadOp;
2301 // pass->attachments[i].store_op = pCreateInfo->pAttachments[i].storeOp;
2302 // pass->attachments[i].stencil_store_op = pCreateInfo->pAttachments[i].stencilStoreOp;
2303 }
2304
2305 for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
2306 const VkSubpassDescription *desc = &pCreateInfo->pSubpasses[i];
2307 struct anv_subpass *subpass = &pass->subpasses[i];
2308
2309 subpass->input_count = desc->inputCount;
2310 subpass->color_count = desc->colorCount;
2311
2312 if (desc->inputCount > 0) {
2313 subpass->input_attachments =
2314 anv_device_alloc(device, desc->inputCount * sizeof(uint32_t),
2315 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2316
2317 for (uint32_t j = 0; j < desc->inputCount; j++) {
2318 subpass->input_attachments[j]
2319 = desc->inputAttachments[j].attachment;
2320 }
2321 }
2322
2323 if (desc->colorCount > 0) {
2324 subpass->color_attachments =
2325 anv_device_alloc(device, desc->colorCount * sizeof(uint32_t),
2326 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2327
2328 for (uint32_t j = 0; j < desc->colorCount; j++) {
2329 subpass->color_attachments[j]
2330 = desc->colorAttachments[j].attachment;
2331 }
2332 }
2333
2334 if (desc->resolveAttachments) {
2335 subpass->resolve_attachments =
2336 anv_device_alloc(device, desc->colorCount * sizeof(uint32_t),
2337 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2338
2339 for (uint32_t j = 0; j < desc->colorCount; j++) {
2340 subpass->resolve_attachments[j]
2341 = desc->resolveAttachments[j].attachment;
2342 }
2343 }
2344
2345 subpass->depth_stencil_attachment = desc->depthStencilAttachment.attachment;
2346 }
2347
2348 *pRenderPass = anv_render_pass_to_handle(pass);
2349
2350 return VK_SUCCESS;
2351 }
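
/* Usage sketch (illustrative only): the smallest VkRenderPassCreateInfo this
 * entry point can consume, with one color attachment and one subpass.
 * VkSubpassDescription is the type used above; VkAttachmentDescription and
 * VkAttachmentReference are assumed from the field accesses and may be named
 * differently in this header revision.  Note that desc->depthStencilAttachment
 * is read unconditionally above, so real callers must initialize it too.
 *
 *    VkRenderPassCreateInfo info = {
 *       .sType           = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
 *       .attachmentCount = 1,
 *       .pAttachments    = &(VkAttachmentDescription) {
 *          .format  = VK_FORMAT_R8G8B8A8_UNORM,
 *          .samples = 1,
 *          .loadOp  = VK_ATTACHMENT_LOAD_OP_CLEAR,
 *       },
 *       .subpassCount = 1,
 *       .pSubpasses   = &(VkSubpassDescription) {
 *          .colorCount       = 1,
 *          .colorAttachments = (VkAttachmentReference[]) {
 *             { .attachment = 0 },
 *          },
 *       },
 *    };
 */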
2352
2353 VkResult anv_DestroyRenderPass(
2354 VkDevice _device,
2355 VkRenderPass _pass)
2356 {
2357 ANV_FROM_HANDLE(anv_device, device, _device);
2358 ANV_FROM_HANDLE(anv_render_pass, pass, _pass);
2359
2360 anv_device_free(device, pass->attachments);
2361
2362 for (uint32_t i = 0; i < pass->subpass_count; i++) {
2363       /* In VkSubpassDescription, each of the attachment arrays may be null.
2364 * Don't free the null arrays.
2365 */
2366 struct anv_subpass *subpass = &pass->subpasses[i];
2367
2368 anv_device_free(device, subpass->input_attachments);
2369 anv_device_free(device, subpass->color_attachments);
2370 anv_device_free(device, subpass->resolve_attachments);
2371 }
2372
2373 anv_device_free(device, pass);
2374
2375 return VK_SUCCESS;
2376 }
2377
2378 VkResult anv_GetRenderAreaGranularity(
2379 VkDevice device,
2380 VkRenderPass renderPass,
2381 VkExtent2D* pGranularity)
2382 {
2383 *pGranularity = (VkExtent2D) { 1, 1 };
2384
2385 return VK_SUCCESS;
2386 }
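
/* A granularity of (1, 1) tells applications that render areas need no
 * special alignment on this hardware; any pixel-aligned render area is as
 * efficient as any other.
 */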
2387
2388 void vkCmdDbgMarkerBegin(
2389 VkCmdBuffer cmdBuffer,
2390 const char* pMarker)
2391 __attribute__ ((visibility ("default")));
2392
2393 void vkCmdDbgMarkerEnd(
2394 VkCmdBuffer cmdBuffer)
2395 __attribute__ ((visibility ("default")));
2396
2397 void vkCmdDbgMarkerBegin(
2398 VkCmdBuffer cmdBuffer,
2399 const char* pMarker)
2400 {
2401 }
2402
2403 void vkCmdDbgMarkerEnd(
2404 VkCmdBuffer cmdBuffer)
2405 {
2406 }