Merge remote-tracking branch 'mesa-public/master' into vulkan
[mesa.git] / src / vulkan / anv_device.c
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <assert.h>
25 #include <stdbool.h>
26 #include <string.h>
27 #include <unistd.h>
28 #include <fcntl.h>
29
30 #include "anv_private.h"
31 #include "mesa/main/git_sha1.h"
32 #include "util/strtod.h"
33
34 static int
35 anv_env_get_int(const char *name)
36 {
37 const char *val = getenv(name);
38
39 if (!val)
40 return 0;
41
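   /* A base of 0 lets strtol() accept decimal, hex ("0x...") and octal
    * spellings of the override value.
    */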
42 return strtol(val, NULL, 0);
43 }
44
45 static VkResult
46 anv_physical_device_init(struct anv_physical_device *device,
47 struct anv_instance *instance,
48 const char *path)
49 {
50 int fd;
51
52 fd = open(path, O_RDWR | O_CLOEXEC);
53 if (fd < 0)
54 return vk_error(VK_ERROR_UNAVAILABLE);
55
56 device->instance = instance;
57 device->path = path;
58
59 device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
60 device->no_hw = false;
61 if (device->chipset_id) {
62 /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
63 device->no_hw = true;
64 } else {
65 device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
66 }
67 if (!device->chipset_id)
68 goto fail;
69
70 device->name = brw_get_device_name(device->chipset_id);
71 device->info = brw_get_device_info(device->chipset_id, -1);
72 if (!device->info)
73 goto fail;
74
75 if (anv_gem_get_aperture(fd, &device->aperture_size) == -1)
76 goto fail;
77
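   /* Hard requirements on the kernel: bounded GEM waits, the execbuffer2
    * ioctl, an LLC (the single memory type advertised below assumes
    * CPU-coherent BOs), and exec-constants support.
    */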
78 if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
79 goto fail;
80
81 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
82 goto fail;
83
84 if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
85 goto fail;
86
87 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
88 goto fail;
89
90 close(fd);
91
92 return VK_SUCCESS;
93
94 fail:
95 close(fd);
96 return vk_error(VK_ERROR_UNAVAILABLE);
97 }
98
99 static void *default_alloc(
100 void* pUserData,
101 size_t size,
102 size_t alignment,
103 VkSystemAllocType allocType)
104 {
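   /* malloc() already returns memory aligned for any standard type (16 bytes
    * on common 64-bit platforms), which covers the 8-byte alignments this
    * driver requests, so the alignment parameter is safely ignored here.
    */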
105 return malloc(size);
106 }
107
108 static void default_free(
109 void* pUserData,
110 void* pMem)
111 {
112 free(pMem);
113 }
114
115 static const VkAllocCallbacks default_alloc_callbacks = {
116 .pUserData = NULL,
117 .pfnAlloc = default_alloc,
118 .pfnFree = default_free
119 };
120
121 VkResult anv_CreateInstance(
122 const VkInstanceCreateInfo* pCreateInfo,
123 VkInstance* pInstance)
124 {
125 struct anv_instance *instance;
126 const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
127 void *user_data = NULL;
128
129 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
130
131 if (pCreateInfo->pAllocCb) {
132 alloc_callbacks = pCreateInfo->pAllocCb;
133 user_data = pCreateInfo->pAllocCb->pUserData;
134 }
135 instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
136 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
137 if (!instance)
138 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
139
140 instance->pAllocUserData = alloc_callbacks->pUserData;
141 instance->pfnAlloc = alloc_callbacks->pfnAlloc;
142 instance->pfnFree = alloc_callbacks->pfnFree;
143 instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
144 instance->physicalDeviceCount = 0;
145
146 _mesa_locale_init();
147
148 VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
149
150 *pInstance = anv_instance_to_handle(instance);
151
152 return VK_SUCCESS;
153 }
154
155 VkResult anv_DestroyInstance(
156 VkInstance _instance)
157 {
158 ANV_FROM_HANDLE(anv_instance, instance, _instance);
159
160 VG(VALGRIND_DESTROY_MEMPOOL(instance));
161
162 _mesa_locale_fini();
163
164 instance->pfnFree(instance->pAllocUserData, instance);
165
166 return VK_SUCCESS;
167 }
168
169 static void *
170 anv_instance_alloc(struct anv_instance *instance, size_t size,
171 size_t alignment, VkSystemAllocType allocType)
172 {
173 void *mem = instance->pfnAlloc(instance->pAllocUserData,
174 size, alignment, allocType);
175 if (mem) {
 176       VG(VALGRIND_MEMPOOL_ALLOC(instance, mem, size));
 177       VG(VALGRIND_MAKE_MEM_UNDEFINED(mem, size));
178 }
179 return mem;
180 }
181
182 static void
183 anv_instance_free(struct anv_instance *instance, void *mem)
184 {
185 if (mem == NULL)
186 return;
187
 188    VG(VALGRIND_MEMPOOL_FREE(instance, mem));
189
190 instance->pfnFree(instance->pAllocUserData, mem);
191 }
192
193 VkResult anv_EnumeratePhysicalDevices(
194 VkInstance _instance,
195 uint32_t* pPhysicalDeviceCount,
196 VkPhysicalDevice* pPhysicalDevices)
197 {
198 ANV_FROM_HANDLE(anv_instance, instance, _instance);
199 VkResult result;
200
201 if (instance->physicalDeviceCount == 0) {
202 result = anv_physical_device_init(&instance->physicalDevice,
203 instance, "/dev/dri/renderD128");
204 if (result != VK_SUCCESS)
205 return result;
206
207 instance->physicalDeviceCount = 1;
208 }
209
210 /* pPhysicalDeviceCount is an out parameter if pPhysicalDevices is NULL;
211 * otherwise it's an inout parameter.
212 *
213 * The Vulkan spec (git aaed022) says:
214 *
215 * pPhysicalDeviceCount is a pointer to an unsigned integer variable
216 * that is initialized with the number of devices the application is
217 * prepared to receive handles to. pname:pPhysicalDevices is pointer to
218 * an array of at least this many VkPhysicalDevice handles [...].
219 *
220 * Upon success, if pPhysicalDevices is NULL, vkEnumeratePhysicalDevices
221 * overwrites the contents of the variable pointed to by
 222  *    pPhysicalDeviceCount with the number of physical devices in the
223 * instance; otherwise, vkEnumeratePhysicalDevices overwrites
224 * pPhysicalDeviceCount with the number of physical handles written to
225 * pPhysicalDevices.
226 */
227 if (!pPhysicalDevices) {
228 *pPhysicalDeviceCount = instance->physicalDeviceCount;
229 } else if (*pPhysicalDeviceCount >= 1) {
230 pPhysicalDevices[0] = anv_physical_device_to_handle(&instance->physicalDevice);
231 *pPhysicalDeviceCount = 1;
232 } else {
233 *pPhysicalDeviceCount = 0;
234 }
235
236 return VK_SUCCESS;
237 }
238
239 VkResult anv_GetPhysicalDeviceFeatures(
240 VkPhysicalDevice physicalDevice,
241 VkPhysicalDeviceFeatures* pFeatures)
242 {
243 anv_finishme("Get correct values for PhysicalDeviceFeatures");
244
245 *pFeatures = (VkPhysicalDeviceFeatures) {
246 .robustBufferAccess = false,
247 .fullDrawIndexUint32 = false,
248 .imageCubeArray = false,
249 .independentBlend = false,
250 .geometryShader = true,
251 .tessellationShader = false,
252 .sampleRateShading = false,
253 .dualSourceBlend = true,
254 .logicOp = true,
255 .instancedDrawIndirect = true,
256 .depthClip = false,
257 .depthBiasClamp = false,
258 .fillModeNonSolid = true,
259 .depthBounds = false,
260 .wideLines = true,
261 .largePoints = true,
262 .textureCompressionETC2 = true,
263 .textureCompressionASTC_LDR = true,
264 .textureCompressionBC = true,
265 .pipelineStatisticsQuery = true,
266 .vertexSideEffects = false,
267 .tessellationSideEffects = false,
268 .geometrySideEffects = false,
269 .fragmentSideEffects = false,
270 .shaderTessellationPointSize = false,
271 .shaderGeometryPointSize = true,
272 .shaderTextureGatherExtended = true,
273 .shaderStorageImageExtendedFormats = false,
274 .shaderStorageImageMultisample = false,
275 .shaderStorageBufferArrayConstantIndexing = false,
276 .shaderStorageImageArrayConstantIndexing = false,
277 .shaderUniformBufferArrayDynamicIndexing = true,
278 .shaderSampledImageArrayDynamicIndexing = false,
279 .shaderStorageBufferArrayDynamicIndexing = false,
280 .shaderStorageImageArrayDynamicIndexing = false,
281 .shaderClipDistance = false,
282 .shaderCullDistance = false,
283 .shaderFloat64 = false,
284 .shaderInt64 = false,
285 .shaderFloat16 = false,
286 .shaderInt16 = false,
287 };
288
289 return VK_SUCCESS;
290 }
291
292 VkResult anv_GetPhysicalDeviceLimits(
293 VkPhysicalDevice physicalDevice,
294 VkPhysicalDeviceLimits* pLimits)
295 {
296 ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
297 const struct brw_device_info *devinfo = physical_device->info;
298
299 anv_finishme("Get correct values for PhysicalDeviceLimits");
300
301 *pLimits = (VkPhysicalDeviceLimits) {
302 .maxImageDimension1D = (1 << 14),
303 .maxImageDimension2D = (1 << 14),
304 .maxImageDimension3D = (1 << 10),
305 .maxImageDimensionCube = (1 << 14),
306 .maxImageArrayLayers = (1 << 10),
307 .maxTexelBufferSize = (1 << 14),
308 .maxUniformBufferSize = UINT32_MAX,
309 .maxStorageBufferSize = UINT32_MAX,
310 .maxPushConstantsSize = 128,
311 .maxMemoryAllocationCount = UINT32_MAX,
312 .bufferImageGranularity = 64, /* A cache line */
313 .maxBoundDescriptorSets = MAX_SETS,
314 .maxDescriptorSets = UINT32_MAX,
315 .maxPerStageDescriptorSamplers = 64,
316 .maxPerStageDescriptorUniformBuffers = 64,
317 .maxPerStageDescriptorStorageBuffers = 64,
318 .maxPerStageDescriptorSampledImages = 64,
319 .maxPerStageDescriptorStorageImages = 64,
320 .maxDescriptorSetSamplers = 256,
321 .maxDescriptorSetUniformBuffers = 256,
322 .maxDescriptorSetStorageBuffers = 256,
323 .maxDescriptorSetSampledImages = 256,
324 .maxDescriptorSetStorageImages = 256,
325 .maxVertexInputAttributes = 32,
326 .maxVertexInputAttributeOffset = 256,
327 .maxVertexInputBindingStride = 256,
328 .maxVertexOutputComponents = 32,
329 .maxTessGenLevel = 0,
330 .maxTessPatchSize = 0,
331 .maxTessControlPerVertexInputComponents = 0,
332 .maxTessControlPerVertexOutputComponents = 0,
333 .maxTessControlPerPatchOutputComponents = 0,
334 .maxTessControlTotalOutputComponents = 0,
335 .maxTessEvaluationInputComponents = 0,
336 .maxTessEvaluationOutputComponents = 0,
337 .maxGeometryShaderInvocations = 6,
338 .maxGeometryInputComponents = 16,
339 .maxGeometryOutputComponents = 16,
340 .maxGeometryOutputVertices = 16,
341 .maxGeometryTotalOutputComponents = 16,
342 .maxFragmentInputComponents = 16,
343 .maxFragmentOutputBuffers = 8,
344 .maxFragmentDualSourceBuffers = 2,
345 .maxFragmentCombinedOutputResources = 8,
346 .maxComputeSharedMemorySize = 1024,
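      /* Compute limits scale with the EU thread count: each thread runs one
       * slice of a workgroup, hence the 16 * max_cs_threads ceilings below
       * (assuming SIMD16 dispatch).
       */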
347 .maxComputeWorkGroupCount = {
348 16 * devinfo->max_cs_threads,
349 16 * devinfo->max_cs_threads,
350 16 * devinfo->max_cs_threads,
351 },
352 .maxComputeWorkGroupInvocations = 16 * devinfo->max_cs_threads,
353 .maxComputeWorkGroupSize = {
354 16 * devinfo->max_cs_threads,
355 16 * devinfo->max_cs_threads,
356 16 * devinfo->max_cs_threads,
357 },
358 .subPixelPrecisionBits = 4 /* FIXME */,
359 .subTexelPrecisionBits = 4 /* FIXME */,
360 .mipmapPrecisionBits = 4 /* FIXME */,
361 .maxDrawIndexedIndexValue = UINT32_MAX,
362 .maxDrawIndirectInstanceCount = UINT32_MAX,
363 .primitiveRestartForPatches = UINT32_MAX,
364 .maxSamplerLodBias = 16,
365 .maxSamplerAnisotropy = 16,
366 .maxViewports = 16,
367 .maxDynamicViewportStates = UINT32_MAX,
368 .maxViewportDimensions = { (1 << 14), (1 << 14) },
369 .viewportBoundsRange = { -1.0, 1.0 }, /* FIXME */
370 .viewportSubPixelBits = 13, /* We take a float? */
371 .minMemoryMapAlignment = 64, /* A cache line */
372 .minTexelBufferOffsetAlignment = 1,
373 .minUniformBufferOffsetAlignment = 1,
374 .minStorageBufferOffsetAlignment = 1,
375 .minTexelOffset = 0, /* FIXME */
376 .maxTexelOffset = 0, /* FIXME */
377 .minTexelGatherOffset = 0, /* FIXME */
378 .maxTexelGatherOffset = 0, /* FIXME */
379 .minInterpolationOffset = 0, /* FIXME */
380 .maxInterpolationOffset = 0, /* FIXME */
381 .subPixelInterpolationOffsetBits = 0, /* FIXME */
382 .maxFramebufferWidth = (1 << 14),
383 .maxFramebufferHeight = (1 << 14),
384 .maxFramebufferLayers = (1 << 10),
385 .maxFramebufferColorSamples = 8,
386 .maxFramebufferDepthSamples = 8,
387 .maxFramebufferStencilSamples = 8,
388 .maxColorAttachments = MAX_RTS,
389 .maxSampledImageColorSamples = 8,
390 .maxSampledImageDepthSamples = 8,
391 .maxSampledImageIntegerSamples = 1,
392 .maxStorageImageSamples = 1,
393 .maxSampleMaskWords = 1,
394 .timestampFrequency = 1000 * 1000 * 1000 / 80,
395 .maxClipDistances = 0 /* FIXME */,
396 .maxCullDistances = 0 /* FIXME */,
397 .maxCombinedClipAndCullDistances = 0 /* FIXME */,
398 .pointSizeRange = { 0.125, 255.875 },
399 .lineWidthRange = { 0.0, 7.9921875 },
400 .pointSizeGranularity = (1.0 / 8.0),
401 .lineWidthGranularity = (1.0 / 128.0),
402 };
403
404 return VK_SUCCESS;
405 }
406
407 VkResult anv_GetPhysicalDeviceProperties(
408 VkPhysicalDevice physicalDevice,
409 VkPhysicalDeviceProperties* pProperties)
410 {
411 ANV_FROM_HANDLE(anv_physical_device, pdevice, physicalDevice);
412
413 *pProperties = (VkPhysicalDeviceProperties) {
414 .apiVersion = VK_MAKE_VERSION(0, 138, 1),
415 .driverVersion = 1,
416 .vendorId = 0x8086,
417 .deviceId = pdevice->chipset_id,
418 .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
419 };
420
421 strcpy(pProperties->deviceName, pdevice->name);
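   /* MESA_GIT_SHA1 has the form "git-<sha1>"; skip the 4-character "git-"
    * prefix so the UUID carries just the commit hash.
    */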
422 snprintf((char *)pProperties->pipelineCacheUUID, VK_UUID_LENGTH,
423 "anv-%s", MESA_GIT_SHA1 + 4);
424
425 return VK_SUCCESS;
426 }
427
428 VkResult anv_GetPhysicalDeviceQueueCount(
429 VkPhysicalDevice physicalDevice,
430 uint32_t* pCount)
431 {
432 *pCount = 1;
433
434 return VK_SUCCESS;
435 }
436
437 VkResult anv_GetPhysicalDeviceQueueProperties(
438 VkPhysicalDevice physicalDevice,
439 uint32_t count,
440 VkPhysicalDeviceQueueProperties* pQueueProperties)
441 {
442 assert(count == 1);
443
444 *pQueueProperties = (VkPhysicalDeviceQueueProperties) {
445 .queueFlags = VK_QUEUE_GRAPHICS_BIT |
446 VK_QUEUE_COMPUTE_BIT |
447 VK_QUEUE_DMA_BIT,
448 .queueCount = 1,
449 .supportsTimestamps = true,
450 };
451
452 return VK_SUCCESS;
453 }
454
455 VkResult anv_GetPhysicalDeviceMemoryProperties(
456 VkPhysicalDevice physicalDevice,
457 VkPhysicalDeviceMemoryProperties* pMemoryProperties)
458 {
459 ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
460 VkDeviceSize heap_size;
461
462 /* Reserve some wiggle room for the driver by exposing only 75% of the
463 * aperture to the heap.
464 */
465 heap_size = 3 * physical_device->aperture_size / 4;
466
467 /* The property flags below are valid only for llc platforms. */
468 pMemoryProperties->memoryTypeCount = 1;
469 pMemoryProperties->memoryTypes[0] = (VkMemoryType) {
470 .propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
 471       .heapIndex = 0,
472 };
473
474 pMemoryProperties->memoryHeapCount = 1;
475 pMemoryProperties->memoryHeaps[0] = (VkMemoryHeap) {
476 .size = heap_size,
477 .flags = VK_MEMORY_HEAP_HOST_LOCAL,
478 };
479
480 return VK_SUCCESS;
481 }
482
483 PFN_vkVoidFunction anv_GetInstanceProcAddr(
484 VkInstance instance,
485 const char* pName)
486 {
487 return anv_lookup_entrypoint(pName);
488 }
489
490 PFN_vkVoidFunction anv_GetDeviceProcAddr(
491 VkDevice device,
492 const char* pName)
493 {
494 return anv_lookup_entrypoint(pName);
495 }
496
497 static void
498 parse_debug_flags(struct anv_device *device)
499 {
500 const char *debug, *p, *end;
501
502 debug = getenv("INTEL_DEBUG");
503 device->dump_aub = false;
504 if (debug) {
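      /* INTEL_DEBUG is a comma-separated list; strchrnul() returns the end
       * of the current token even when it is the last, unterminated one.
       */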
505 for (p = debug; *p; p = end + 1) {
506 end = strchrnul(p, ',');
507 if (end - p == 3 && memcmp(p, "aub", 3) == 0)
508 device->dump_aub = true;
509 if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
510 device->no_hw = true;
511 if (*end == '\0')
512 break;
513 }
514 }
515 }
516
517 static VkResult
518 anv_queue_init(struct anv_device *device, struct anv_queue *queue)
519 {
520 queue->device = device;
521 queue->pool = &device->surface_state_pool;
522
523 queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
524 if (queue->completed_serial.map == NULL)
525 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
526
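   /* completed_serial is a single GPU-visible uint32_t stamped with the
    * serial of the most recently completed submission (see anv_QueueSubmit).
    */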
527 *(uint32_t *)queue->completed_serial.map = 0;
528 queue->next_serial = 1;
529
530 return VK_SUCCESS;
531 }
532
533 static void
534 anv_queue_finish(struct anv_queue *queue)
535 {
536 #ifdef HAVE_VALGRIND
537 /* This gets torn down with the device so we only need to do this if
538 * valgrind is present.
539 */
540 anv_state_pool_free(queue->pool, queue->completed_serial);
541 #endif
542 }
543
544 static void
545 anv_device_init_border_colors(struct anv_device *device)
546 {
547 static const VkClearColorValue border_colors[] = {
548 [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 0.0 } },
549 [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 1.0 } },
550 [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE] = { .f32 = { 1.0, 1.0, 1.0, 1.0 } },
551 [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK] = { .u32 = { 0, 0, 0, 0 } },
552 [VK_BORDER_COLOR_INT_OPAQUE_BLACK] = { .u32 = { 0, 0, 0, 1 } },
553 [VK_BORDER_COLOR_INT_OPAQUE_WHITE] = { .u32 = { 1, 1, 1, 1 } },
554 };
555
556 device->border_colors =
557 anv_state_pool_alloc(&device->dynamic_state_pool,
558 sizeof(border_colors), 32);
559 memcpy(device->border_colors.map, border_colors, sizeof(border_colors));
560 }
561
562 VkResult anv_CreateDevice(
563 VkPhysicalDevice physicalDevice,
564 const VkDeviceCreateInfo* pCreateInfo,
565 VkDevice* pDevice)
566 {
567 ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);
568 struct anv_instance *instance = physical_device->instance;
569 struct anv_device *device;
570
571 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
572
573 device = anv_instance_alloc(instance, sizeof(*device), 8,
574 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
575 if (!device)
576 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
577
578 device->no_hw = physical_device->no_hw;
579 parse_debug_flags(device);
580
581 device->instance = physical_device->instance;
582
583 /* XXX(chadv): Can we dup() physicalDevice->fd here? */
584 device->fd = open(physical_device->path, O_RDWR | O_CLOEXEC);
585 if (device->fd == -1)
586 goto fail_device;
587
588 device->context_id = anv_gem_create_context(device);
589 if (device->context_id == -1)
590 goto fail_fd;
591
592 anv_bo_pool_init(&device->batch_bo_pool, device, ANV_CMD_BUFFER_BATCH_SIZE);
593
594 anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);
595
596 anv_state_pool_init(&device->dynamic_state_pool,
597 &device->dynamic_state_block_pool);
598
599 anv_block_pool_init(&device->instruction_block_pool, device, 2048);
600 anv_block_pool_init(&device->surface_state_block_pool, device, 2048);
601
602 anv_state_pool_init(&device->surface_state_pool,
603 &device->surface_state_block_pool);
604
605 anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);
606
607 device->info = *physical_device->info;
608
609 device->compiler = anv_compiler_create(device);
610 device->aub_writer = NULL;
611
612 pthread_mutex_init(&device->mutex, NULL);
613
614 anv_queue_init(device, &device->queue);
615
616 anv_device_init_meta(device);
617
618 anv_device_init_border_colors(device);
619
620 *pDevice = anv_device_to_handle(device);
621
622 return VK_SUCCESS;
623
624 fail_fd:
625 close(device->fd);
626 fail_device:
627 anv_device_free(device, device);
628
629 return vk_error(VK_ERROR_UNAVAILABLE);
630 }
631
632 VkResult anv_DestroyDevice(
633 VkDevice _device)
634 {
635 ANV_FROM_HANDLE(anv_device, device, _device);
636
637 anv_compiler_destroy(device->compiler);
638
639 anv_queue_finish(&device->queue);
640
641 anv_device_finish_meta(device);
642
643 #ifdef HAVE_VALGRIND
644 /* We only need to free these to prevent valgrind errors. The backing
645 * BO will go away in a couple of lines so we don't actually leak.
646 */
647 anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
648 #endif
649
650 anv_bo_pool_finish(&device->batch_bo_pool);
651 anv_state_pool_finish(&device->dynamic_state_pool);
652 anv_block_pool_finish(&device->dynamic_state_block_pool);
653 anv_block_pool_finish(&device->instruction_block_pool);
654 anv_state_pool_finish(&device->surface_state_pool);
655 anv_block_pool_finish(&device->surface_state_block_pool);
656 anv_block_pool_finish(&device->scratch_block_pool);
657
658 close(device->fd);
659
660 if (device->aub_writer)
661 anv_aub_writer_destroy(device->aub_writer);
662
663 anv_instance_free(device->instance, device);
664
665 return VK_SUCCESS;
666 }
667
668 static const VkExtensionProperties global_extensions[] = {
669 {
670 .extName = "VK_WSI_LunarG",
671 .specVersion = 3
672 }
673 };
674
675 VkResult anv_GetGlobalExtensionProperties(
676 const char* pLayerName,
677 uint32_t* pCount,
678 VkExtensionProperties* pProperties)
679 {
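   /* Standard Vulkan two-call idiom: a NULL pProperties means the caller is
    * only querying the count.
    */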
680 if (pProperties == NULL) {
681 *pCount = ARRAY_SIZE(global_extensions);
682 return VK_SUCCESS;
683 }
684
 685    assert(*pCount >= ARRAY_SIZE(global_extensions));
686
687 *pCount = ARRAY_SIZE(global_extensions);
688 memcpy(pProperties, global_extensions, sizeof(global_extensions));
689
690 return VK_SUCCESS;
691 }
692
693 VkResult anv_GetPhysicalDeviceExtensionProperties(
694 VkPhysicalDevice physicalDevice,
695 const char* pLayerName,
696 uint32_t* pCount,
697 VkExtensionProperties* pProperties)
698 {
699 if (pProperties == NULL) {
700 *pCount = 0;
701 return VK_SUCCESS;
702 }
703
704 /* None supported at this time */
705 return vk_error(VK_ERROR_INVALID_EXTENSION);
706 }
707
708 VkResult anv_GetGlobalLayerProperties(
709 uint32_t* pCount,
710 VkLayerProperties* pProperties)
711 {
712 if (pProperties == NULL) {
713 *pCount = 0;
714 return VK_SUCCESS;
715 }
716
717 /* None supported at this time */
718 return vk_error(VK_ERROR_INVALID_LAYER);
719 }
720
721 VkResult anv_GetPhysicalDeviceLayerProperties(
722 VkPhysicalDevice physicalDevice,
723 uint32_t* pCount,
724 VkLayerProperties* pProperties)
725 {
726 if (pProperties == NULL) {
727 *pCount = 0;
728 return VK_SUCCESS;
729 }
730
731 /* None supported at this time */
732 return vk_error(VK_ERROR_INVALID_LAYER);
733 }
734
735 VkResult anv_GetDeviceQueue(
736 VkDevice _device,
737 uint32_t queueNodeIndex,
738 uint32_t queueIndex,
739 VkQueue* pQueue)
740 {
741 ANV_FROM_HANDLE(anv_device, device, _device);
742
743 assert(queueIndex == 0);
744
745 *pQueue = anv_queue_to_handle(&device->queue);
746
747 return VK_SUCCESS;
748 }
749
750 VkResult anv_QueueSubmit(
751 VkQueue _queue,
752 uint32_t cmdBufferCount,
753 const VkCmdBuffer* pCmdBuffers,
754 VkFence _fence)
755 {
756 ANV_FROM_HANDLE(anv_queue, queue, _queue);
757 ANV_FROM_HANDLE(anv_fence, fence, _fence);
758 struct anv_device *device = queue->device;
759 int ret;
760
761 for (uint32_t i = 0; i < cmdBufferCount; i++) {
762 ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, pCmdBuffers[i]);
763
764 assert(cmd_buffer->level == VK_CMD_BUFFER_LEVEL_PRIMARY);
765
766 if (device->dump_aub)
767 anv_cmd_buffer_dump(cmd_buffer);
768
769 if (!device->no_hw) {
770 ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf2.execbuf);
771 if (ret != 0)
772 return vk_error(VK_ERROR_UNKNOWN);
773
774 if (fence) {
775 ret = anv_gem_execbuffer(device, &fence->execbuf);
776 if (ret != 0)
777 return vk_error(VK_ERROR_UNKNOWN);
778 }
779
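         /* The kernel may relocate BOs during execbuf; record the updated
          * offsets so later submits can present them as presumed offsets
          * (this is what makes I915_EXEC_NO_RELOC profitable).
          */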
780 for (uint32_t i = 0; i < cmd_buffer->execbuf2.bo_count; i++)
781 cmd_buffer->execbuf2.bos[i]->offset = cmd_buffer->execbuf2.objects[i].offset;
782 } else {
783 *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
784 }
785 }
786
787 return VK_SUCCESS;
788 }
789
790 VkResult anv_QueueWaitIdle(
791 VkQueue _queue)
792 {
793 ANV_FROM_HANDLE(anv_queue, queue, _queue);
794
795 return vkDeviceWaitIdle(anv_device_to_handle(queue->device));
796 }
797
798 VkResult anv_DeviceWaitIdle(
799 VkDevice _device)
800 {
801 ANV_FROM_HANDLE(anv_device, device, _device);
802 struct anv_state state;
803 struct anv_batch batch;
804 struct drm_i915_gem_execbuffer2 execbuf;
805 struct drm_i915_gem_exec_object2 exec2_objects[1];
806 struct anv_bo *bo = NULL;
807 VkResult result;
808 int64_t timeout;
809 int ret;
810
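   /* Rather than track every outstanding BO, submit a trivial batch (just
    * MI_BATCH_BUFFER_END) and wait on its BO: requests on a single context
    * execute in order, so once this batch retires, all earlier work has too.
    */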
811 state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
812 bo = &device->dynamic_state_pool.block_pool->bo;
813 batch.start = batch.next = state.map;
814 batch.end = state.map + 32;
815 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
816 anv_batch_emit(&batch, GEN8_MI_NOOP);
817
818 exec2_objects[0].handle = bo->gem_handle;
819 exec2_objects[0].relocation_count = 0;
820 exec2_objects[0].relocs_ptr = 0;
821 exec2_objects[0].alignment = 0;
822 exec2_objects[0].offset = bo->offset;
823 exec2_objects[0].flags = 0;
824 exec2_objects[0].rsvd1 = 0;
825 exec2_objects[0].rsvd2 = 0;
826
827 execbuf.buffers_ptr = (uintptr_t) exec2_objects;
828 execbuf.buffer_count = 1;
829 execbuf.batch_start_offset = state.offset;
830 execbuf.batch_len = batch.next - state.map;
831 execbuf.cliprects_ptr = 0;
832 execbuf.num_cliprects = 0;
833 execbuf.DR1 = 0;
834 execbuf.DR4 = 0;
835
836 execbuf.flags =
837 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
838 execbuf.rsvd1 = device->context_id;
839 execbuf.rsvd2 = 0;
840
841 if (!device->no_hw) {
842 ret = anv_gem_execbuffer(device, &execbuf);
843 if (ret != 0) {
844 result = vk_error(VK_ERROR_UNKNOWN);
845 goto fail;
846 }
847
848 timeout = INT64_MAX;
849 ret = anv_gem_wait(device, bo->gem_handle, &timeout);
850 if (ret != 0) {
851 result = vk_error(VK_ERROR_UNKNOWN);
852 goto fail;
853 }
854 }
855
856 anv_state_pool_free(&device->dynamic_state_pool, state);
857
858 return VK_SUCCESS;
859
860 fail:
861 anv_state_pool_free(&device->dynamic_state_pool, state);
862
863 return result;
864 }
865
866 void *
867 anv_device_alloc(struct anv_device * device,
868 size_t size,
869 size_t alignment,
870 VkSystemAllocType allocType)
871 {
872 return anv_instance_alloc(device->instance, size, alignment, allocType);
873 }
874
875 void
876 anv_device_free(struct anv_device * device,
877 void * mem)
878 {
879 anv_instance_free(device->instance, mem);
880 }
881
882 VkResult
883 anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
884 {
885 bo->gem_handle = anv_gem_create(device, size);
886 if (!bo->gem_handle)
887 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
888
889 bo->map = NULL;
890 bo->index = 0;
891 bo->offset = 0;
892 bo->size = size;
893
894 return VK_SUCCESS;
895 }
896
897 VkResult anv_AllocMemory(
898 VkDevice _device,
899 const VkMemoryAllocInfo* pAllocInfo,
900 VkDeviceMemory* pMem)
901 {
902 ANV_FROM_HANDLE(anv_device, device, _device);
903 struct anv_device_memory *mem;
904 VkResult result;
905
906 assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
907
908 if (pAllocInfo->memoryTypeIndex != 0) {
 909       /* We support exactly one memory type. */
910 return vk_error(VK_ERROR_INVALID_VALUE);
911 }
912
913 /* FINISHME: Fail if allocation request exceeds heap size. */
914
915 mem = anv_device_alloc(device, sizeof(*mem), 8,
916 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
917 if (mem == NULL)
918 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
919
920 result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
921 if (result != VK_SUCCESS)
922 goto fail;
923
924 *pMem = anv_device_memory_to_handle(mem);
925
926 return VK_SUCCESS;
927
928 fail:
929 anv_device_free(device, mem);
930
931 return result;
932 }
933
934 VkResult anv_FreeMemory(
935 VkDevice _device,
936 VkDeviceMemory _mem)
937 {
938 ANV_FROM_HANDLE(anv_device, device, _device);
939 ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
940
941 if (mem->bo.map)
942 anv_gem_munmap(mem->bo.map, mem->bo.size);
943
944 if (mem->bo.gem_handle != 0)
945 anv_gem_close(device, mem->bo.gem_handle);
946
947 anv_device_free(device, mem);
948
949 return VK_SUCCESS;
950 }
951
952 VkResult anv_MapMemory(
953 VkDevice _device,
954 VkDeviceMemory _mem,
955 VkDeviceSize offset,
956 VkDeviceSize size,
957 VkMemoryMapFlags flags,
958 void** ppData)
959 {
960 ANV_FROM_HANDLE(anv_device, device, _device);
961 ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
962
963 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
964 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
965 * at a time is valid. We could just mmap up front and return an offset
966 * pointer here, but that may exhaust virtual memory on 32 bit
967 * userspace. */
968
969 mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
970 mem->map_size = size;
971
972 *ppData = mem->map;
973
974 return VK_SUCCESS;
975 }
976
977 VkResult anv_UnmapMemory(
978 VkDevice _device,
979 VkDeviceMemory _mem)
980 {
981 ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
982
983 anv_gem_munmap(mem->map, mem->map_size);
984
985 return VK_SUCCESS;
986 }
987
988 VkResult anv_FlushMappedMemoryRanges(
989 VkDevice device,
990 uint32_t memRangeCount,
991 const VkMappedMemoryRange* pMemRanges)
992 {
993 /* clflush here for !llc platforms */
994
995 return VK_SUCCESS;
996 }
997
998 VkResult anv_InvalidateMappedMemoryRanges(
999 VkDevice device,
1000 uint32_t memRangeCount,
1001 const VkMappedMemoryRange* pMemRanges)
1002 {
1003 return anv_FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
1004 }
1005
1006 VkResult anv_GetBufferMemoryRequirements(
1007 VkDevice device,
1008 VkBuffer _buffer,
1009 VkMemoryRequirements* pMemoryRequirements)
1010 {
1011 ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
1012
1013 /* The Vulkan spec (git aaed022) says:
1014 *
1015 * memoryTypeBits is a bitfield and contains one bit set for every
1016 * supported memory type for the resource. The bit `1<<i` is set if and
1017 * only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
1018 * structure for the physical device is supported.
1019 *
1020 * We support exactly one memory type.
1021 */
1022 pMemoryRequirements->memoryTypeBits = 1;
1023
1024 pMemoryRequirements->size = buffer->size;
1025 pMemoryRequirements->alignment = 16;
1026
1027 return VK_SUCCESS;
1028 }
1029
1030 VkResult anv_GetImageMemoryRequirements(
1031 VkDevice device,
1032 VkImage _image,
1033 VkMemoryRequirements* pMemoryRequirements)
1034 {
1035 ANV_FROM_HANDLE(anv_image, image, _image);
1036
1037 /* The Vulkan spec (git aaed022) says:
1038 *
1039 * memoryTypeBits is a bitfield and contains one bit set for every
1040 * supported memory type for the resource. The bit `1<<i` is set if and
1041 * only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
1042 * structure for the physical device is supported.
1043 *
1044 * We support exactly one memory type.
1045 */
1046 pMemoryRequirements->memoryTypeBits = 1;
1047
1048 pMemoryRequirements->size = image->size;
1049 pMemoryRequirements->alignment = image->alignment;
1050
1051 return VK_SUCCESS;
1052 }
1053
1054 VkResult anv_GetImageSparseMemoryRequirements(
1055 VkDevice device,
1056 VkImage image,
1057 uint32_t* pNumRequirements,
1058 VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
1059 {
1060 return vk_error(VK_UNSUPPORTED);
1061 }
1062
1063 VkResult anv_GetDeviceMemoryCommitment(
1064 VkDevice device,
1065 VkDeviceMemory memory,
1066 VkDeviceSize* pCommittedMemoryInBytes)
1067 {
1068 *pCommittedMemoryInBytes = 0;
1069 stub_return(VK_SUCCESS);
1070 }
1071
1072 VkResult anv_BindBufferMemory(
1073 VkDevice device,
1074 VkBuffer _buffer,
1075 VkDeviceMemory _mem,
1076 VkDeviceSize memOffset)
1077 {
1078 ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
1079 ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
1080
1081 buffer->bo = &mem->bo;
1082 buffer->offset = memOffset;
1083
1084 return VK_SUCCESS;
1085 }
1086
1087 VkResult anv_BindImageMemory(
1088 VkDevice device,
1089 VkImage _image,
1090 VkDeviceMemory _mem,
1091 VkDeviceSize memOffset)
1092 {
1093 ANV_FROM_HANDLE(anv_device_memory, mem, _mem);
1094 ANV_FROM_HANDLE(anv_image, image, _image);
1095
1096 image->bo = &mem->bo;
1097 image->offset = memOffset;
1098
1099 return VK_SUCCESS;
1100 }
1101
1102 VkResult anv_QueueBindSparseBufferMemory(
1103 VkQueue queue,
1104 VkBuffer buffer,
1105 uint32_t numBindings,
1106 const VkSparseMemoryBindInfo* pBindInfo)
1107 {
1108 stub_return(VK_UNSUPPORTED);
1109 }
1110
1111 VkResult anv_QueueBindSparseImageOpaqueMemory(
1112 VkQueue queue,
1113 VkImage image,
1114 uint32_t numBindings,
1115 const VkSparseMemoryBindInfo* pBindInfo)
1116 {
1117 stub_return(VK_UNSUPPORTED);
1118 }
1119
1120 VkResult anv_QueueBindSparseImageMemory(
1121 VkQueue queue,
1122 VkImage image,
1123 uint32_t numBindings,
1124 const VkSparseImageMemoryBindInfo* pBindInfo)
1125 {
1126 stub_return(VK_UNSUPPORTED);
1127 }
1128
1129 VkResult anv_CreateFence(
1130 VkDevice _device,
1131 const VkFenceCreateInfo* pCreateInfo,
1132 VkFence* pFence)
1133 {
1134 ANV_FROM_HANDLE(anv_device, device, _device);
1135 struct anv_fence *fence;
1136 struct anv_batch batch;
1137 VkResult result;
1138
1139 const uint32_t fence_size = 128;
1140
1141 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);
1142
1143 fence = anv_device_alloc(device, sizeof(*fence), 8,
1144 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1145 if (fence == NULL)
1146 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1147
1148 result = anv_bo_init_new(&fence->bo, device, fence_size);
1149 if (result != VK_SUCCESS)
1150 goto fail;
1151
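   /* A fence is its own tiny batch: signaling submits it after the real
    * work, and status queries reduce to anv_gem_wait() on the fence BO.
    */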
1152 fence->bo.map =
1153 anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
1154 batch.next = batch.start = fence->bo.map;
1155 batch.end = fence->bo.map + fence->bo.size;
1156 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
1157 anv_batch_emit(&batch, GEN8_MI_NOOP);
1158
1159 fence->exec2_objects[0].handle = fence->bo.gem_handle;
1160 fence->exec2_objects[0].relocation_count = 0;
1161 fence->exec2_objects[0].relocs_ptr = 0;
1162 fence->exec2_objects[0].alignment = 0;
1163 fence->exec2_objects[0].offset = fence->bo.offset;
1164 fence->exec2_objects[0].flags = 0;
1165 fence->exec2_objects[0].rsvd1 = 0;
1166 fence->exec2_objects[0].rsvd2 = 0;
1167
1168 fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
1169 fence->execbuf.buffer_count = 1;
1170 fence->execbuf.batch_start_offset = 0;
1171 fence->execbuf.batch_len = batch.next - fence->bo.map;
1172 fence->execbuf.cliprects_ptr = 0;
1173 fence->execbuf.num_cliprects = 0;
1174 fence->execbuf.DR1 = 0;
1175 fence->execbuf.DR4 = 0;
1176
1177 fence->execbuf.flags =
1178 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
1179 fence->execbuf.rsvd1 = device->context_id;
1180 fence->execbuf.rsvd2 = 0;
1181
1182 *pFence = anv_fence_to_handle(fence);
1183
1184 return VK_SUCCESS;
1185
1186 fail:
1187 anv_device_free(device, fence);
1188
1189 return result;
1190 }
1191
1192 VkResult anv_DestroyFence(
1193 VkDevice _device,
1194 VkFence _fence)
1195 {
1196 ANV_FROM_HANDLE(anv_device, device, _device);
1197 ANV_FROM_HANDLE(anv_fence, fence, _fence);
1198
1199 anv_gem_munmap(fence->bo.map, fence->bo.size);
1200 anv_gem_close(device, fence->bo.gem_handle);
1201 anv_device_free(device, fence);
1202
1203 return VK_SUCCESS;
1204 }
1205
1206 VkResult anv_ResetFences(
1207 VkDevice _device,
1208 uint32_t fenceCount,
1209 const VkFence* pFences)
1210 {
1211 for (uint32_t i = 0; i < fenceCount; i++) {
1212 ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
1213 fence->ready = false;
1214 }
1215
1216 return VK_SUCCESS;
1217 }
1218
1219 VkResult anv_GetFenceStatus(
1220 VkDevice _device,
1221 VkFence _fence)
1222 {
1223 ANV_FROM_HANDLE(anv_device, device, _device);
1224 ANV_FROM_HANDLE(anv_fence, fence, _fence);
1225 int64_t t = 0;
1226 int ret;
1227
1228 if (fence->ready)
1229 return VK_SUCCESS;
1230
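   /* With a zero timeout the wait ioctl returns immediately, making this a
    * non-blocking poll.
    */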
1231 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1232 if (ret == 0) {
1233 fence->ready = true;
1234 return VK_SUCCESS;
1235 }
1236
1237 return VK_NOT_READY;
1238 }
1239
1240 VkResult anv_WaitForFences(
1241 VkDevice _device,
1242 uint32_t fenceCount,
1243 const VkFence* pFences,
1244 VkBool32 waitAll,
1245 uint64_t timeout)
1246 {
1247 ANV_FROM_HANDLE(anv_device, device, _device);
1248 int64_t t = timeout;
1249 int ret;
1250
1251 /* FIXME: handle !waitAll */
1252
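   /* The wait ioctl writes the remaining time back through t, so the serial
    * waits below still respect the caller's overall timeout budget.
    */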
1253 for (uint32_t i = 0; i < fenceCount; i++) {
1254 ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
1255 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1256 if (ret == -1 && errno == ETIME)
1257 return VK_TIMEOUT;
1258 else if (ret == -1)
1259 return vk_error(VK_ERROR_UNKNOWN);
1260 }
1261
1262 return VK_SUCCESS;
1263 }
1264
1265 // Queue semaphore functions
1266
1267 VkResult anv_CreateSemaphore(
1268 VkDevice device,
1269 const VkSemaphoreCreateInfo* pCreateInfo,
1270 VkSemaphore* pSemaphore)
1271 {
1272 stub_return(VK_UNSUPPORTED);
1273 }
1274
1275 VkResult anv_DestroySemaphore(
1276 VkDevice device,
1277 VkSemaphore semaphore)
1278 {
1279 stub_return(VK_UNSUPPORTED);
1280 }
1281
1282 VkResult anv_QueueSignalSemaphore(
1283 VkQueue queue,
1284 VkSemaphore semaphore)
1285 {
1286 stub_return(VK_UNSUPPORTED);
1287 }
1288
1289 VkResult anv_QueueWaitSemaphore(
1290 VkQueue queue,
1291 VkSemaphore semaphore)
1292 {
1293 stub_return(VK_UNSUPPORTED);
1294 }
1295
1296 // Event functions
1297
1298 VkResult anv_CreateEvent(
1299 VkDevice device,
1300 const VkEventCreateInfo* pCreateInfo,
1301 VkEvent* pEvent)
1302 {
1303 stub_return(VK_UNSUPPORTED);
1304 }
1305
1306 VkResult anv_DestroyEvent(
1307 VkDevice device,
1308 VkEvent event)
1309 {
1310 stub_return(VK_UNSUPPORTED);
1311 }
1312
1313 VkResult anv_GetEventStatus(
1314 VkDevice device,
1315 VkEvent event)
1316 {
1317 stub_return(VK_UNSUPPORTED);
1318 }
1319
1320 VkResult anv_SetEvent(
1321 VkDevice device,
1322 VkEvent event)
1323 {
1324 stub_return(VK_UNSUPPORTED);
1325 }
1326
1327 VkResult anv_ResetEvent(
1328 VkDevice device,
1329 VkEvent event)
1330 {
1331 stub_return(VK_UNSUPPORTED);
1332 }
1333
1334 // Buffer functions
1335
1336 VkResult anv_CreateBuffer(
1337 VkDevice _device,
1338 const VkBufferCreateInfo* pCreateInfo,
1339 VkBuffer* pBuffer)
1340 {
1341 ANV_FROM_HANDLE(anv_device, device, _device);
1342 struct anv_buffer *buffer;
1343
1344 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1345
1346 buffer = anv_device_alloc(device, sizeof(*buffer), 8,
1347 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1348 if (buffer == NULL)
1349 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1350
1351 buffer->size = pCreateInfo->size;
1352 buffer->bo = NULL;
1353 buffer->offset = 0;
1354
1355 *pBuffer = anv_buffer_to_handle(buffer);
1356
1357 return VK_SUCCESS;
1358 }
1359
1360 VkResult anv_DestroyBuffer(
1361 VkDevice _device,
1362 VkBuffer _buffer)
1363 {
1364 ANV_FROM_HANDLE(anv_device, device, _device);
1365 ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);
1366
1367 anv_device_free(device, buffer);
1368
1369 return VK_SUCCESS;
1370 }
1371
1372 // Buffer view functions
1373
1374 void
1375 anv_fill_buffer_surface_state(void *state, VkFormat format,
1376 uint32_t offset, uint32_t range)
1377 {
1378 const struct anv_format *info;
1379
1380 info = anv_format_for_vk_format(format);
 1381    /* FIXME: Hardcoded 4-byte element stride; this should be derived from
 1381       the format's bytes-per-element. */
1382 uint32_t stride = 4;
1383 uint32_t num_elements = range / stride;
1384
1385 struct GEN8_RENDER_SURFACE_STATE surface_state = {
1386 .SurfaceType = SURFTYPE_BUFFER,
1387 .SurfaceArray = false,
1388 .SurfaceFormat = info->surface_format,
1389 .SurfaceVerticalAlignment = VALIGN4,
1390 .SurfaceHorizontalAlignment = HALIGN4,
1391 .TileMode = LINEAR,
1392 .VerticalLineStride = 0,
1393 .VerticalLineStrideOffset = 0,
1394 .SamplerL2BypassModeDisable = true,
1395 .RenderCacheReadWriteMode = WriteOnlyCache,
1396 .MemoryObjectControlState = GEN8_MOCS,
1397 .BaseMipLevel = 0.0,
1398 .SurfaceQPitch = 0,
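      /* For SURFTYPE_BUFFER, Width/Height/Depth jointly encode the element
       * count minus one, split across 7-, 14- and 6-bit fields.
       */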
 1399       .Height = ((num_elements - 1) >> 7) & 0x3fff,
 1400       .Width = (num_elements - 1) & 0x7f,
 1401       .Depth = ((num_elements - 1) >> 21) & 0x3f,
1402 .SurfacePitch = stride - 1,
1403 .MinimumArrayElement = 0,
1404 .NumberofMultisamples = MULTISAMPLECOUNT_1,
1405 .XOffset = 0,
1406 .YOffset = 0,
1407 .SurfaceMinLOD = 0,
1408 .MIPCountLOD = 0,
1409 .AuxiliarySurfaceMode = AUX_NONE,
1410 .RedClearColor = 0,
1411 .GreenClearColor = 0,
1412 .BlueClearColor = 0,
1413 .AlphaClearColor = 0,
1414 .ShaderChannelSelectRed = SCS_RED,
1415 .ShaderChannelSelectGreen = SCS_GREEN,
1416 .ShaderChannelSelectBlue = SCS_BLUE,
1417 .ShaderChannelSelectAlpha = SCS_ALPHA,
1418 .ResourceMinLOD = 0.0,
1419 /* FIXME: We assume that the image must be bound at this time. */
1420 .SurfaceBaseAddress = { NULL, offset },
1421 };
1422
1423 GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
1424 }
1425
1426 VkResult anv_CreateBufferView(
1427 VkDevice _device,
1428 const VkBufferViewCreateInfo* pCreateInfo,
1429 VkBufferView* pView)
1430 {
1431 ANV_FROM_HANDLE(anv_device, device, _device);
1432 ANV_FROM_HANDLE(anv_buffer, buffer, pCreateInfo->buffer);
1433 struct anv_buffer_view *bview;
1434 struct anv_surface_view *view;
1435
1436 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);
1437
 1438    bview = anv_device_alloc(device, sizeof(*bview), 8,
1439 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1440 if (bview == NULL)
1441 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1442
1443 view = &bview->view;
1444 view->bo = buffer->bo;
1445 view->offset = buffer->offset + pCreateInfo->offset;
1446 view->surface_state =
1447 anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
1448 view->format = pCreateInfo->format;
1449 view->range = pCreateInfo->range;
1450
1451 anv_fill_buffer_surface_state(view->surface_state.map,
1452 pCreateInfo->format,
1453 view->offset, pCreateInfo->range);
1454
1455 *pView = anv_buffer_view_to_handle(bview);
1456
1457 return VK_SUCCESS;
1458 }
1459
1460 VkResult anv_DestroyBufferView(
1461 VkDevice _device,
1462 VkBufferView _bview)
1463 {
1464 ANV_FROM_HANDLE(anv_device, device, _device);
1465 ANV_FROM_HANDLE(anv_buffer_view, bview, _bview);
1466
1467 anv_surface_view_fini(device, &bview->view);
1468 anv_device_free(device, bview);
1469
1470 return VK_SUCCESS;
1471 }
1472
1473 // Sampler functions
1474
1475 VkResult anv_CreateSampler(
1476 VkDevice _device,
1477 const VkSamplerCreateInfo* pCreateInfo,
1478 VkSampler* pSampler)
1479 {
1480 ANV_FROM_HANDLE(anv_device, device, _device);
1481 struct anv_sampler *sampler;
1482 uint32_t mag_filter, min_filter, max_anisotropy;
1483
1484 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
1485
1486 sampler = anv_device_alloc(device, sizeof(*sampler), 8,
1487 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1488 if (!sampler)
1489 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1490
1491 static const uint32_t vk_to_gen_tex_filter[] = {
1492 [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
1493 [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
1494 };
1495
1496 static const uint32_t vk_to_gen_mipmap_mode[] = {
1497 [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
1498 [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
1499 [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
1500 };
1501
1502 static const uint32_t vk_to_gen_tex_address[] = {
1503 [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
1504 [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
1505 [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
1506 [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
1507 [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
1508 };
1509
1510 static const uint32_t vk_to_gen_compare_op[] = {
1511 [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
1512 [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
1513 [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
1514 [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
1515 [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
1516 [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
1517 [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
1518 [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
1519 };
1520
1521 if (pCreateInfo->maxAnisotropy > 1) {
1522 mag_filter = MAPFILTER_ANISOTROPIC;
1523 min_filter = MAPFILTER_ANISOTROPIC;
1524 max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
1525 } else {
1526 mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
1527 min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
1528 max_anisotropy = RATIO21;
1529 }
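   /* The hardware encodes max anisotropy as (ratio - 2) / 2: RATIO21 (2:1)
    * encodes as 0, and each step adds 2 to the ratio.
    */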
1530
1531 struct GEN8_SAMPLER_STATE sampler_state = {
1532 .SamplerDisable = false,
1533 .TextureBorderColorMode = DX10OGL,
1534 .LODPreClampMode = 0,
1535 .BaseMipLevel = 0.0,
1536 .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
1537 .MagModeFilter = mag_filter,
1538 .MinModeFilter = min_filter,
1539 .TextureLODBias = pCreateInfo->mipLodBias * 256,
1540 .AnisotropicAlgorithm = EWAApproximation,
1541 .MinLOD = pCreateInfo->minLod,
1542 .MaxLOD = pCreateInfo->maxLod,
1543 .ChromaKeyEnable = 0,
1544 .ChromaKeyIndex = 0,
1545 .ChromaKeyMode = 0,
1546 .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
1547 .CubeSurfaceControlMode = 0,
1548
1549 .IndirectStatePointer =
1550 device->border_colors.offset +
1551 pCreateInfo->borderColor * sizeof(float) * 4,
1552
1553 .LODClampMagnificationMode = MIPNONE,
1554 .MaximumAnisotropy = max_anisotropy,
1555 .RAddressMinFilterRoundingEnable = 0,
1556 .RAddressMagFilterRoundingEnable = 0,
1557 .VAddressMinFilterRoundingEnable = 0,
1558 .VAddressMagFilterRoundingEnable = 0,
1559 .UAddressMinFilterRoundingEnable = 0,
1560 .UAddressMagFilterRoundingEnable = 0,
1561 .TrilinearFilterQuality = 0,
1562 .NonnormalizedCoordinateEnable = 0,
1563 .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
1564 .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
1565 .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
1566 };
1567
1568 GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);
1569
1570 *pSampler = anv_sampler_to_handle(sampler);
1571
1572 return VK_SUCCESS;
1573 }
1574
1575 VkResult anv_DestroySampler(
1576 VkDevice _device,
1577 VkSampler _sampler)
1578 {
1579 ANV_FROM_HANDLE(anv_device, device, _device);
1580 ANV_FROM_HANDLE(anv_sampler, sampler, _sampler);
1581
1582 anv_device_free(device, sampler);
1583
1584 return VK_SUCCESS;
1585 }
1586
1587 // Descriptor set functions
1588
1589 VkResult anv_CreateDescriptorSetLayout(
1590 VkDevice _device,
1591 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
1592 VkDescriptorSetLayout* pSetLayout)
1593 {
1594 ANV_FROM_HANDLE(anv_device, device, _device);
1595 struct anv_descriptor_set_layout *set_layout;
1596
1597 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
1598
1599 uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
1600 uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
1601 uint32_t num_dynamic_buffers = 0;
1602 uint32_t count = 0;
1603 uint32_t stages = 0;
1604 uint32_t s;
1605
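   /* First pass: count how many sampler and surface slots each shader stage
    * needs so the trailing entries[] array can be sized and partitioned.
    */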
1606 for (uint32_t i = 0; i < pCreateInfo->count; i++) {
1607 switch (pCreateInfo->pBinding[i].descriptorType) {
1608 case VK_DESCRIPTOR_TYPE_SAMPLER:
1609 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1610 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
1611 sampler_count[s] += pCreateInfo->pBinding[i].arraySize;
1612 break;
1613 default:
1614 break;
1615 }
1616
1617 switch (pCreateInfo->pBinding[i].descriptorType) {
1618 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1619 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1620 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1621 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1622 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1623 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1624 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1625 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1626 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1627 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1628 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
1629 surface_count[s] += pCreateInfo->pBinding[i].arraySize;
1630 break;
1631 default:
1632 break;
1633 }
1634
1635 switch (pCreateInfo->pBinding[i].descriptorType) {
1636 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1637 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1638 num_dynamic_buffers += pCreateInfo->pBinding[i].arraySize;
1639 break;
1640 default:
1641 break;
1642 }
1643
1644 stages |= pCreateInfo->pBinding[i].stageFlags;
1645 count += pCreateInfo->pBinding[i].arraySize;
1646 }
1647
1648 uint32_t sampler_total = 0;
1649 uint32_t surface_total = 0;
1650 for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
1651 sampler_total += sampler_count[s];
1652 surface_total += surface_count[s];
1653 }
1654
1655 size_t size = sizeof(*set_layout) +
1656 (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
1657 set_layout = anv_device_alloc(device, size, 8,
1658 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1659 if (!set_layout)
1660 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1661
1662 set_layout->num_dynamic_buffers = num_dynamic_buffers;
1663 set_layout->count = count;
1664 set_layout->shader_stages = stages;
1665
1666 struct anv_descriptor_slot *p = set_layout->entries;
1667 struct anv_descriptor_slot *sampler[VK_SHADER_STAGE_NUM];
1668 struct anv_descriptor_slot *surface[VK_SHADER_STAGE_NUM];
1669 for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
1670 set_layout->stage[s].surface_count = surface_count[s];
1671 set_layout->stage[s].surface_start = surface[s] = p;
1672 p += surface_count[s];
1673 set_layout->stage[s].sampler_count = sampler_count[s];
1674 set_layout->stage[s].sampler_start = sampler[s] = p;
1675 p += sampler_count[s];
1676 }
1677
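   /* Second pass: record, per stage, which descriptor index and dynamic
    * buffer slot each sampler/surface entry refers to; dynamic_slot == -1
    * marks a non-dynamic binding.
    */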
1678 uint32_t descriptor = 0;
1679 int8_t dynamic_slot = 0;
1680 bool is_dynamic;
1681 for (uint32_t i = 0; i < pCreateInfo->count; i++) {
1682 switch (pCreateInfo->pBinding[i].descriptorType) {
1683 case VK_DESCRIPTOR_TYPE_SAMPLER:
1684 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1685 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
1686 for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
1687 sampler[s]->index = descriptor + j;
1688 sampler[s]->dynamic_slot = -1;
1689 sampler[s]++;
1690 }
1691 break;
1692 default:
1693 break;
1694 }
1695
1696 switch (pCreateInfo->pBinding[i].descriptorType) {
1697 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1698 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1699 is_dynamic = true;
1700 break;
1701 default:
1702 is_dynamic = false;
1703 break;
1704 }
1705
1706 switch (pCreateInfo->pBinding[i].descriptorType) {
1707 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1708 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1709 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1710 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1711 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1712 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1713 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1714 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1715 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1716 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1717 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
1718 for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
1719 surface[s]->index = descriptor + j;
1720 if (is_dynamic)
1721 surface[s]->dynamic_slot = dynamic_slot + j;
1722 else
1723 surface[s]->dynamic_slot = -1;
1724 surface[s]++;
1725 }
1726 break;
1727 default:
1728 break;
1729 }
1730
1731 if (is_dynamic)
1732 dynamic_slot += pCreateInfo->pBinding[i].arraySize;
1733
1734 descriptor += pCreateInfo->pBinding[i].arraySize;
1735 }
1736
1737 *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);
1738
1739 return VK_SUCCESS;
1740 }
1741
1742 VkResult anv_DestroyDescriptorSetLayout(
1743 VkDevice _device,
1744 VkDescriptorSetLayout _set_layout)
1745 {
1746 ANV_FROM_HANDLE(anv_device, device, _device);
1747 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);
1748
1749 anv_device_free(device, set_layout);
1750
1751 return VK_SUCCESS;
1752 }
1753
1754 VkResult anv_CreateDescriptorPool(
1755 VkDevice device,
1756 VkDescriptorPoolUsage poolUsage,
1757 uint32_t maxSets,
1758 const VkDescriptorPoolCreateInfo* pCreateInfo,
1759 VkDescriptorPool* pDescriptorPool)
1760 {
1761 anv_finishme("VkDescriptorPool is a stub");
1762 pDescriptorPool->handle = 1;
1763 return VK_SUCCESS;
1764 }
1765
1766 VkResult anv_DestroyDescriptorPool(
1767 VkDevice _device,
1768 VkDescriptorPool _pool)
1769 {
1770 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
1771 return VK_SUCCESS;
1772 }
1773
1774 VkResult anv_ResetDescriptorPool(
1775 VkDevice device,
1776 VkDescriptorPool descriptorPool)
1777 {
1778 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
1779 return VK_SUCCESS;
1780 }
1781
1782 VkResult
1783 anv_descriptor_set_create(struct anv_device *device,
1784 const struct anv_descriptor_set_layout *layout,
1785 struct anv_descriptor_set **out_set)
1786 {
1787 struct anv_descriptor_set *set;
1788 size_t size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);
1789
1790 set = anv_device_alloc(device, size, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1791 if (!set)
1792 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1793
 1794    /* A descriptor set may not be 100% filled. Clear the set so we can
1795 * later detect holes in it.
1796 */
1797 memset(set, 0, size);
1798
1799 *out_set = set;
1800
1801 return VK_SUCCESS;
1802 }
1803
1804 void
1805 anv_descriptor_set_destroy(struct anv_device *device,
1806 struct anv_descriptor_set *set)
1807 {
1808 anv_device_free(device, set);
1809 }
1810
1811 VkResult anv_AllocDescriptorSets(
1812 VkDevice _device,
1813 VkDescriptorPool descriptorPool,
1814 VkDescriptorSetUsage setUsage,
1815 uint32_t count,
1816 const VkDescriptorSetLayout* pSetLayouts,
1817 VkDescriptorSet* pDescriptorSets,
1818 uint32_t* pCount)
1819 {
1820 ANV_FROM_HANDLE(anv_device, device, _device);
1821
1822 VkResult result;
1823 struct anv_descriptor_set *set;
1824
1825 for (uint32_t i = 0; i < count; i++) {
1826 ANV_FROM_HANDLE(anv_descriptor_set_layout, layout, pSetLayouts[i]);
1827
1828 result = anv_descriptor_set_create(device, layout, &set);
1829 if (result != VK_SUCCESS) {
1830 *pCount = i;
1831 return result;
1832 }
1833
1834 pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
1835 }
1836
1837 *pCount = count;
1838
1839 return VK_SUCCESS;
1840 }
1841
1842 VkResult anv_FreeDescriptorSets(
1843 VkDevice _device,
1844 VkDescriptorPool descriptorPool,
1845 uint32_t count,
1846 const VkDescriptorSet* pDescriptorSets)
1847 {
1848 ANV_FROM_HANDLE(anv_device, device, _device);
1849
1850 for (uint32_t i = 0; i < count; i++) {
1851 ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
1852
1853 anv_descriptor_set_destroy(device, set);
1854 }
1855
1856 return VK_SUCCESS;
1857 }
1858
1859 VkResult anv_UpdateDescriptorSets(
1860 VkDevice device,
1861 uint32_t writeCount,
1862 const VkWriteDescriptorSet* pDescriptorWrites,
1863 uint32_t copyCount,
1864 const VkCopyDescriptorSet* pDescriptorCopies)
1865 {
1866 for (uint32_t i = 0; i < writeCount; i++) {
1867 const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
1868 ANV_FROM_HANDLE(anv_descriptor_set, set, write->destSet);
1869
1870 switch (write->descriptorType) {
1871 case VK_DESCRIPTOR_TYPE_SAMPLER:
1872 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1873 for (uint32_t j = 0; j < write->count; j++) {
1874 set->descriptors[write->destBinding + j].sampler =
1875 anv_sampler_from_handle(write->pDescriptors[j].sampler);
1876 }
1877
1878 if (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
1879 break;
1880
1881 /* fallthrough */
1882
1883 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1884 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1885 for (uint32_t j = 0; j < write->count; j++) {
1886 ANV_FROM_HANDLE(anv_image_view, iview,
1887 write->pDescriptors[j].imageView);
1888 set->descriptors[write->destBinding + j].view = &iview->view;
1889 }
1890 break;
1891
1892 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1893 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1894 anv_finishme("texel buffers not implemented");
1895 break;
1896
1897 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1898 anv_finishme("input attachments not implemented");
1899 break;
1900
1901 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1902 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1903 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1904 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1905 for (uint32_t j = 0; j < write->count; j++) {
1906 ANV_FROM_HANDLE(anv_buffer_view, bview,
1907 write->pDescriptors[j].bufferView);
1908 set->descriptors[write->destBinding + j].view = &bview->view;
 1909          }
 1910          break;
1911 default:
1912 break;
1913 }
1914 }
1915
1916 for (uint32_t i = 0; i < copyCount; i++) {
1917 const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
 1918       ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
1919 ANV_FROM_HANDLE(anv_descriptor_set, dest, copy->destSet);
1920 for (uint32_t j = 0; j < copy->count; j++) {
1921 dest->descriptors[copy->destBinding + j] =
1922 src->descriptors[copy->srcBinding + j];
1923 }
1924 }
1925
1926 return VK_SUCCESS;
1927 }
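
/* A minimal caller-side sketch for the write path above; only the fields
 * this implementation actually reads are shown, and the handle names
 * (dev, set, view) plus the VkDescriptorInfo element type are assumptions
 * from this header revision:
 *
 *    VkDescriptorInfo info = { .bufferView = view };
 *    VkWriteDescriptorSet write = {
 *       .destSet = set,
 *       .destBinding = 0,
 *       .count = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pDescriptors = &info,
 *    };
 *    anv_UpdateDescriptorSets(dev, 1, &write, 0, NULL);
 */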
1928
1929 /* State object functions */
1930
1931 static inline int64_t
1932 clamp_int64(int64_t x, int64_t min, int64_t max)
1933 {
1934 if (x < min)
1935 return min;
1936 else if (x < max)
1937 return x;
1938 else
1939 return max;
1940 }
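
/* Worked example of how this clamp is used for scissors below: with
 * offset.y = 0x20000 and extent.height = 1, the 64-bit ymax is 0x20000,
 * which clamp_int64(ymax, 0, 0xffff) pins to 0xffff instead of letting
 * the 16-bit hardware field silently wrap. */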
1941
1942 VkResult anv_CreateDynamicViewportState(
1943 VkDevice _device,
1944 const VkDynamicViewportStateCreateInfo* pCreateInfo,
1945 VkDynamicViewportState* pState)
1946 {
1947 ANV_FROM_HANDLE(anv_device, device, _device);
1948 struct anv_dynamic_vp_state *state;
1949
1950 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO);
1951
1952 state = anv_device_alloc(device, sizeof(*state), 8,
1953 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1954 if (state == NULL)
1955 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1956
1957 unsigned count = pCreateInfo->viewportAndScissorCount;
1958 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
1959 count * 64, 64);
1960 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
1961 count * 8, 32);
1962 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
1963 count * 32, 32);
1964
1965    for (uint32_t i = 0; i < count; i++) {
1966 const VkViewport *vp = &pCreateInfo->pViewports[i];
1967 const VkRect2D *s = &pCreateInfo->pScissors[i];
1968
1969 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
1970 .ViewportMatrixElementm00 = vp->width / 2,
1971 .ViewportMatrixElementm11 = vp->height / 2,
1972 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
1973 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
1974 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
1975 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
1976 .XMinClipGuardband = -1.0f,
1977 .XMaxClipGuardband = 1.0f,
1978 .YMinClipGuardband = -1.0f,
1979 .YMaxClipGuardband = 1.0f,
1980 .XMinViewPort = vp->originX,
1981 .XMaxViewPort = vp->originX + vp->width - 1,
1982 .YMinViewPort = vp->originY,
1983 .YMaxViewPort = vp->originY + vp->height - 1,
1984 };
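
      /* The matrix elements above implement the usual NDC-to-window
       * transform: x_w = m00 * x_ndc + m30, y_w = m11 * y_ndc + m31 and
       * z_w = m22 * z_ndc + m32. For example, a 1920x1080 viewport at
       * the origin gives m00 = m30 = 960, so x_ndc = -1 maps to 0 and
       * x_ndc = 1 maps to 1920. */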
1985
1986 struct GEN8_CC_VIEWPORT cc_viewport = {
1987 .MinimumDepth = vp->minDepth,
1988 .MaximumDepth = vp->maxDepth
1989 };
1990
1991 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1992        * ymax < ymin for empty clips. In case clip x, y, width, height are all
1993        * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
1994        * what we want. Just special-case empty clips and produce a canonical
1995 * empty clip. */
1996 static const struct GEN8_SCISSOR_RECT empty_scissor = {
1997 .ScissorRectangleYMin = 1,
1998 .ScissorRectangleXMin = 1,
1999 .ScissorRectangleYMax = 0,
2000 .ScissorRectangleXMax = 0
2001 };
2002
2003 const int max = 0xffff;
2004 struct GEN8_SCISSOR_RECT scissor = {
2005 /* Do this math using int64_t so overflow gets clamped correctly. */
2006 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
2007 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
2008 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
2009 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
2010 };
2011
2012 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
2013       GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 8, &cc_viewport);
2014
2015 if (s->extent.width <= 0 || s->extent.height <= 0) {
2016 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
2017 } else {
2018 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
2019 }
2020 }
2021
2022 *pState = anv_dynamic_vp_state_to_handle(state);
2023
2024 return VK_SUCCESS;
2025 }
2026
2027 VkResult anv_DestroyDynamicViewportState(
2028 VkDevice _device,
2029 VkDynamicViewportState _vp_state)
2030 {
2031 ANV_FROM_HANDLE(anv_device, device, _device);
2032 ANV_FROM_HANDLE(anv_dynamic_vp_state, vp_state, _vp_state);
2033
2034 anv_state_pool_free(&device->dynamic_state_pool, vp_state->sf_clip_vp);
2035 anv_state_pool_free(&device->dynamic_state_pool, vp_state->cc_vp);
2036 anv_state_pool_free(&device->dynamic_state_pool, vp_state->scissor);
2037
2038 anv_device_free(device, vp_state);
2039
2040 return VK_SUCCESS;
2041 }
2042
2043 VkResult anv_CreateDynamicRasterState(
2044 VkDevice _device,
2045 const VkDynamicRasterStateCreateInfo* pCreateInfo,
2046 VkDynamicRasterState* pState)
2047 {
2048 ANV_FROM_HANDLE(anv_device, device, _device);
2049 struct anv_dynamic_rs_state *state;
2050
2051 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RASTER_STATE_CREATE_INFO);
2052
2053 state = anv_device_alloc(device, sizeof(*state), 8,
2054 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2055 if (state == NULL)
2056 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2057
2058 struct GEN8_3DSTATE_SF sf = {
2059 GEN8_3DSTATE_SF_header,
2060 .LineWidth = pCreateInfo->lineWidth,
2061 };
2062
2063 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
2064
2065 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
2066 pCreateInfo->slopeScaledDepthBias != 0.0f;
2067 struct GEN8_3DSTATE_RASTER raster = {
2068 .GlobalDepthOffsetEnableSolid = enable_bias,
2069 .GlobalDepthOffsetEnableWireframe = enable_bias,
2070 .GlobalDepthOffsetEnablePoint = enable_bias,
2071 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
2072 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
2073 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
2074 };
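
   /* This roughly maps the API's depth-bias equation,
    * offset = depthBias * r + slopeScaledDepthBias * m (limited by
    * depthBiasClamp), onto the hardware's global depth-offset fields;
    * the per-primitive-type enables are set only when some bias term is
    * non-zero. */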
2075
2076 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
2077
2078 *pState = anv_dynamic_rs_state_to_handle(state);
2079
2080 return VK_SUCCESS;
2081 }
2082
2083 VkResult anv_DestroyDynamicRasterState(
2084 VkDevice _device,
2085 VkDynamicRasterState _rs_state)
2086 {
2087 ANV_FROM_HANDLE(anv_device, device, _device);
2088 ANV_FROM_HANDLE(anv_dynamic_rs_state, rs_state, _rs_state);
2089
2090 anv_device_free(device, rs_state);
2091
2092 return VK_SUCCESS;
2093 }
2094
2095 VkResult anv_CreateDynamicColorBlendState(
2096 VkDevice _device,
2097 const VkDynamicColorBlendStateCreateInfo* pCreateInfo,
2098 VkDynamicColorBlendState* pState)
2099 {
2100 ANV_FROM_HANDLE(anv_device, device, _device);
2101 struct anv_dynamic_cb_state *state;
2102
2103 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_COLOR_BLEND_STATE_CREATE_INFO);
2104
2105 state = anv_device_alloc(device, sizeof(*state), 8,
2106 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2107 if (state == NULL)
2108 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2109
2110 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2111 .BlendConstantColorRed = pCreateInfo->blendConst[0],
2112 .BlendConstantColorGreen = pCreateInfo->blendConst[1],
2113 .BlendConstantColorBlue = pCreateInfo->blendConst[2],
2114 .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
2115 };
2116
2117 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2118
2119 *pState = anv_dynamic_cb_state_to_handle(state);
2120
2121 return VK_SUCCESS;
2122 }
2123
2124 VkResult anv_DestroyDynamicColorBlendState(
2125 VkDevice _device,
2126 VkDynamicColorBlendState _cb_state)
2127 {
2128 ANV_FROM_HANDLE(anv_device, device, _device);
2129 ANV_FROM_HANDLE(anv_dynamic_cb_state, cb_state, _cb_state);
2130
2131 anv_device_free(device, cb_state);
2132
2133 return VK_SUCCESS;
2134 }
2135
2136 VkResult anv_CreateDynamicDepthStencilState(
2137 VkDevice _device,
2138 const VkDynamicDepthStencilStateCreateInfo* pCreateInfo,
2139 VkDynamicDepthStencilState* pState)
2140 {
2141 ANV_FROM_HANDLE(anv_device, device, _device);
2142 struct anv_dynamic_ds_state *state;
2143
2144 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DEPTH_STENCIL_STATE_CREATE_INFO);
2145
2146 state = anv_device_alloc(device, sizeof(*state), 8,
2147 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2148 if (state == NULL)
2149 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2150
2151 struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
2152 GEN8_3DSTATE_WM_DEPTH_STENCIL_header,
2153
2154       /* TODO: is keying write-enable off a non-zero write mask correct? */
2155 .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,
2156
2157 .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2158 .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
2159
2160 .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2161 .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
2162 };
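
   /* The hardware stencil masks are 8 bits wide, hence the & 0xff above.
    * This API revision supplies a single read/write mask pair, so the
    * same values are programmed for both front- and back-facing state. */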
2163
2164 GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
2165 &wm_depth_stencil);
2166
2167 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2168 .StencilReferenceValue = pCreateInfo->stencilFrontRef,
2169 .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
2170 };
2171
2172 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2173
2174 *pState = anv_dynamic_ds_state_to_handle(state);
2175
2176 return VK_SUCCESS;
2177 }
2178
2179 VkResult anv_DestroyDynamicDepthStencilState(
2180 VkDevice _device,
2181 VkDynamicDepthStencilState _ds_state)
2182 {
2183 ANV_FROM_HANDLE(anv_device, device, _device);
2184 ANV_FROM_HANDLE(anv_dynamic_ds_state, ds_state, _ds_state);
2185
2186 anv_device_free(device, ds_state);
2187
2188 return VK_SUCCESS;
2189 }
2190
2191 VkResult anv_CreateFramebuffer(
2192 VkDevice _device,
2193 const VkFramebufferCreateInfo* pCreateInfo,
2194 VkFramebuffer* pFramebuffer)
2195 {
2196 ANV_FROM_HANDLE(anv_device, device, _device);
2197 struct anv_framebuffer *framebuffer;
2198
2199 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
2200
2201 size_t size = sizeof(*framebuffer) +
2202 sizeof(struct anv_attachment_view *) * pCreateInfo->attachmentCount;
2203 framebuffer = anv_device_alloc(device, size, 8,
2204 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2205 if (framebuffer == NULL)
2206 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2207
2208 framebuffer->attachment_count = pCreateInfo->attachmentCount;
2209 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2210 ANV_FROM_HANDLE(anv_attachment_view, view,
2211 pCreateInfo->pAttachments[i].view);
2212
2213 framebuffer->attachments[i] = view;
2214 }
2215
2216 framebuffer->width = pCreateInfo->width;
2217 framebuffer->height = pCreateInfo->height;
2218 framebuffer->layers = pCreateInfo->layers;
2219
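   /* Give each framebuffer a default dynamic viewport state with a single
    * viewport and scissor covering its full extent; this is suitable as a
    * fallback when no application-provided viewport state is bound. */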
2220 anv_CreateDynamicViewportState(anv_device_to_handle(device),
2221 &(VkDynamicViewportStateCreateInfo) {
2222 .sType = VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO,
2223 .viewportAndScissorCount = 1,
2224 .pViewports = (VkViewport[]) {
2225 {
2226 .originX = 0,
2227 .originY = 0,
2228 .width = pCreateInfo->width,
2229 .height = pCreateInfo->height,
2230 .minDepth = 0,
2231 .maxDepth = 1
2232 },
2233 },
2234 .pScissors = (VkRect2D[]) {
2235 { { 0, 0 },
2236 { pCreateInfo->width, pCreateInfo->height } },
2237 }
2238 },
2239 &framebuffer->vp_state);
2240
2241 *pFramebuffer = anv_framebuffer_to_handle(framebuffer);
2242
2243 return VK_SUCCESS;
2244 }
2245
2246 VkResult anv_DestroyFramebuffer(
2247 VkDevice _device,
2248 VkFramebuffer _fb)
2249 {
2250 ANV_FROM_HANDLE(anv_device, device, _device);
2251 ANV_FROM_HANDLE(anv_framebuffer, fb, _fb);
2252
2253 anv_DestroyDynamicViewportState(anv_device_to_handle(device),
2254 fb->vp_state);
2255 anv_device_free(device, fb);
2256
2257 return VK_SUCCESS;
2258 }
2259
2260 VkResult anv_CreateRenderPass(
2261 VkDevice _device,
2262 const VkRenderPassCreateInfo* pCreateInfo,
2263 VkRenderPass* pRenderPass)
2264 {
2265 ANV_FROM_HANDLE(anv_device, device, _device);
2266 struct anv_render_pass *pass;
2267 size_t size;
2268
2269 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
2270
2271 size = sizeof(*pass) +
2272 pCreateInfo->subpassCount * sizeof(struct anv_subpass);
2273 pass = anv_device_alloc(device, size, 8,
2274 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2275 if (pass == NULL)
2276 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2277
2278    /* Clear the subpasses along with the parent pass. This is required because
2279 * each array member of anv_subpass must be a valid pointer if not NULL.
2280 */
2281 memset(pass, 0, size);
2282
2283 pass->attachment_count = pCreateInfo->attachmentCount;
2284 pass->subpass_count = pCreateInfo->subpassCount;
2285
2286 size = pCreateInfo->attachmentCount * sizeof(*pass->attachments);
2287 pass->attachments = anv_device_alloc(device, size, 8,
2288 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2289 for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2290 pass->attachments[i].format = pCreateInfo->pAttachments[i].format;
2291 pass->attachments[i].samples = pCreateInfo->pAttachments[i].samples;
2292 pass->attachments[i].load_op = pCreateInfo->pAttachments[i].loadOp;
2293 pass->attachments[i].stencil_load_op = pCreateInfo->pAttachments[i].stencilLoadOp;
2294 // pass->attachments[i].store_op = pCreateInfo->pAttachments[i].storeOp;
2295 // pass->attachments[i].stencil_store_op = pCreateInfo->pAttachments[i].stencilStoreOp;
2296 }
2297
2298 for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++) {
2299 const VkSubpassDescription *desc = &pCreateInfo->pSubpasses[i];
2300 struct anv_subpass *subpass = &pass->subpasses[i];
2301
2302 subpass->input_count = desc->inputCount;
2303 subpass->color_count = desc->colorCount;
2304
2305 if (desc->inputCount > 0) {
2306 subpass->input_attachments =
2307 anv_device_alloc(device, desc->inputCount * sizeof(uint32_t),
2308 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2309
2310 for (uint32_t j = 0; j < desc->inputCount; j++) {
2311 subpass->input_attachments[j]
2312 = desc->inputAttachments[j].attachment;
2313 }
2314 }
2315
2316 if (desc->colorCount > 0) {
2317 subpass->color_attachments =
2318 anv_device_alloc(device, desc->colorCount * sizeof(uint32_t),
2319 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2320
2321 for (uint32_t j = 0; j < desc->colorCount; j++) {
2322 subpass->color_attachments[j]
2323 = desc->colorAttachments[j].attachment;
2324 }
2325 }
2326
2327 if (desc->resolveAttachments) {
2328 subpass->resolve_attachments =
2329 anv_device_alloc(device, desc->colorCount * sizeof(uint32_t),
2330 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2331
2332 for (uint32_t j = 0; j < desc->colorCount; j++) {
2333 subpass->resolve_attachments[j]
2334 = desc->resolveAttachments[j].attachment;
2335 }
2336 }
2337
2338 subpass->depth_stencil_attachment = desc->depthStencilAttachment.attachment;
2339 }
2340
2341 *pRenderPass = anv_render_pass_to_handle(pass);
2342
2343 return VK_SUCCESS;
2344 }
2345
2346 VkResult anv_DestroyRenderPass(
2347 VkDevice _device,
2348 VkRenderPass _pass)
2349 {
2350 ANV_FROM_HANDLE(anv_device, device, _device);
2351 ANV_FROM_HANDLE(anv_render_pass, pass, _pass);
2352
2353 anv_device_free(device, pass->attachments);
2354
2355 for (uint32_t i = 0; i < pass->subpass_count; i++) {
2356       /* In VkSubpassDescription, each of the attachment arrays may be null;
2357        * those pointers are then still NULL from the memset at creation time,
2358        * so freeing them below is a no-op. */
2359 struct anv_subpass *subpass = &pass->subpasses[i];
2360
2361 anv_device_free(device, subpass->input_attachments);
2362 anv_device_free(device, subpass->color_attachments);
2363 anv_device_free(device, subpass->resolve_attachments);
2364 }
2365
2366 anv_device_free(device, pass);
2367
2368 return VK_SUCCESS;
2369 }
2370
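/* Rendering here is not tiled, so there is no alignment benefit to report;
 * a 1x1 granularity tells the application any render area is optimal. */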
2371 VkResult anv_GetRenderAreaGranularity(
2372 VkDevice device,
2373 VkRenderPass renderPass,
2374 VkExtent2D* pGranularity)
2375 {
2376 *pGranularity = (VkExtent2D) { 1, 1 };
2377
2378 return VK_SUCCESS;
2379 }
2380
2381 void vkCmdDbgMarkerBegin(
2382 VkCmdBuffer cmdBuffer,
2383 const char* pMarker)
2384 __attribute__ ((visibility ("default")));
2385
2386 void vkCmdDbgMarkerEnd(
2387 VkCmdBuffer cmdBuffer)
2388 __attribute__ ((visibility ("default")));
2389
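/* Empty stubs for the debug-marker entry points: they get default
 * visibility above so the loader can resolve the symbols, but the driver
 * currently ignores markers. */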
2390 void vkCmdDbgMarkerBegin(
2391 VkCmdBuffer cmdBuffer,
2392 const char* pMarker)
2393 {
2394 }
2395
2396 void vkCmdDbgMarkerEnd(
2397 VkCmdBuffer cmdBuffer)
2398 {
2399 }