Added a few more stubs so that control reaches DestroyDevice().
[mesa.git] src/freedreno/vulkan/tu_device.c
1 /*
2 * Copyright © 2016 Red Hat.
3 * Copyright © 2016 Bas Nieuwenhuizen
4 *
5 * based in part on anv driver which is:
6 * Copyright © 2015 Intel Corporation
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a
9 * copy of this software and associated documentation files (the "Software"),
10 * to deal in the Software without restriction, including without limitation
11 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
12 * and/or sell copies of the Software, and to permit persons to whom the
13 * Software is furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice (including the next
16 * paragraph) shall be included in all copies or substantial portions of the
17 * Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25 * DEALINGS IN THE SOFTWARE.
26 */
27
28 #include "tu_private.h"
29
30 #include <fcntl.h>
31 #include <libsync.h>
32 #include <stdbool.h>
33 #include <string.h>
34 #include <sys/sysinfo.h>
35 #include <unistd.h>
36
37 #include "compiler/glsl_types.h"
38 #include "util/debug.h"
39 #include "util/disk_cache.h"
40 #include "util/u_atomic.h"
41 #include "vk_format.h"
42 #include "vk_util.h"
43
44 /* for fd_get_driver/device_uuid() */
45 #include "freedreno/common/freedreno_uuid.h"
46
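/* Cache-UUID layout, as assembled below: 4 bytes of Mesa build timestamp,
 * 2 bytes of GPU family, then the literal string "tu".
 */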
47 static int
48 tu_device_get_cache_uuid(uint16_t family, void *uuid)
49 {
50 uint32_t mesa_timestamp;
51 uint16_t f = family;
52 memset(uuid, 0, VK_UUID_SIZE);
53 if (!disk_cache_get_function_timestamp(tu_device_get_cache_uuid,
54 &mesa_timestamp))
55 return -1;
56
57 memcpy(uuid, &mesa_timestamp, 4);
58 memcpy((char *) uuid + 4, &f, 2);
59 snprintf((char *) uuid + 6, VK_UUID_SIZE - 10, "tu");
60 return 0;
61 }
62
63 VkResult
64 tu_physical_device_init(struct tu_physical_device *device,
65 struct tu_instance *instance)
66 {
67 VkResult result = VK_SUCCESS;
68
69 memset(device->name, 0, sizeof(device->name));
70 sprintf(device->name, "FD%d", device->gpu_id);
71
72 device->limited_z24s8 = (device->gpu_id == 630);
73
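/* Per-chip tuning: CCU (color cache unit) offsets for GMEM and bypass
 * rendering, tile alignment, and a couple of not-yet-understood "magic"
 * register values.
 */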
74 switch (device->gpu_id) {
75 case 618:
76 device->ccu_offset_gmem = 0x7c000; /* 0x7e000 in some cases? */
77 device->ccu_offset_bypass = 0x10000;
78 device->tile_align_w = 32;
79 device->magic.PC_UNKNOWN_9805 = 0x0;
80 device->magic.SP_UNKNOWN_A0F8 = 0x0;
81 device->supports_multiview_mask = false; /* TODO */
82 break;
83 case 630:
84 case 640:
85 device->ccu_offset_gmem = 0xf8000;
86 device->ccu_offset_bypass = 0x20000;
87 device->tile_align_w = 32;
88 device->magic.PC_UNKNOWN_9805 = 0x1;
89 device->magic.SP_UNKNOWN_A0F8 = 0x1;
90 device->supports_multiview_mask = device->gpu_id != 630;
91 break;
92 case 650:
93 device->ccu_offset_gmem = 0x114000;
94 device->ccu_offset_bypass = 0x30000;
95 device->tile_align_w = 96;
96 device->magic.PC_UNKNOWN_9805 = 0x2;
97 device->magic.SP_UNKNOWN_A0F8 = 0x2;
98 device->supports_multiview_mask = true;
99 break;
100 default:
101 result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
102 "device %s is unsupported", device->name);
103 goto fail;
104 }
105 if (tu_device_get_cache_uuid(device->gpu_id, device->cache_uuid)) {
106 result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
107 "cannot generate UUID");
108 goto fail;
109 }
110
111    /* The gpu id is already embedded in the cache uuid, so the hex-formatted
112     * uuid plus the device name is all disk_cache_create() needs as a key.
113     */
114 char buf[VK_UUID_SIZE * 2 + 1];
115 disk_cache_format_hex_id(buf, device->cache_uuid, VK_UUID_SIZE * 2);
116 device->disk_cache = disk_cache_create(device->name, buf, 0);
117
118 fprintf(stderr, "WARNING: tu is not a conformant vulkan implementation, "
119 "testing use only.\n");
120
121 fd_get_driver_uuid(device->driver_uuid);
122 fd_get_device_uuid(device->device_uuid, device->gpu_id);
123
124 tu_physical_device_get_supported_extensions(device, &device->supported_extensions);
125
126 if (result != VK_SUCCESS) {
127 vk_error(instance, result);
128 goto fail;
129 }
130
131 result = tu_wsi_init(device);
132 if (result != VK_SUCCESS) {
133 vk_error(instance, result);
134 goto fail;
135 }
136
137 return VK_SUCCESS;
138
139 fail:
140 close(device->local_fd);
141 if (device->master_fd != -1)
142 close(device->master_fd);
143 return result;
144 }
145
146 static void
147 tu_physical_device_finish(struct tu_physical_device *device)
148 {
149 tu_wsi_finish(device);
150
151 disk_cache_destroy(device->disk_cache);
152 close(device->local_fd);
153 if (device->master_fd != -1)
154 close(device->master_fd);
155
156 vk_object_base_finish(&device->base);
157 }
158
159 static VKAPI_ATTR void *
160 default_alloc_func(void *pUserData,
161 size_t size,
162 size_t align,
163 VkSystemAllocationScope allocationScope)
164 {
165 return malloc(size);
166 }
167
168 static VKAPI_ATTR void *
169 default_realloc_func(void *pUserData,
170 void *pOriginal,
171 size_t size,
172 size_t align,
173 VkSystemAllocationScope allocationScope)
174 {
175 return realloc(pOriginal, size);
176 }
177
178 static VKAPI_ATTR void
179 default_free_func(void *pUserData, void *pMemory)
180 {
181 free(pMemory);
182 }
183
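/* Fallback host allocator, used whenever the application does not supply
 * VkAllocationCallbacks of its own.
 */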
184 static const VkAllocationCallbacks default_alloc = {
185 .pUserData = NULL,
186 .pfnAllocation = default_alloc_func,
187 .pfnReallocation = default_realloc_func,
188 .pfnFree = default_free_func,
189 };
190
191 static const struct debug_control tu_debug_options[] = {
192 { "startup", TU_DEBUG_STARTUP },
193 { "nir", TU_DEBUG_NIR },
194 { "ir3", TU_DEBUG_IR3 },
195 { "nobin", TU_DEBUG_NOBIN },
196 { "sysmem", TU_DEBUG_SYSMEM },
197 { "forcebin", TU_DEBUG_FORCEBIN },
198 { "noubwc", TU_DEBUG_NOUBWC },
199 { NULL, 0 }
200 };
201
202 const char *
203 tu_get_debug_option_name(int id)
204 {
205 assert(id < ARRAY_SIZE(tu_debug_options) - 1);
206 return tu_debug_options[id].string;
207 }
208
209 static int
210 tu_get_instance_extension_index(const char *name)
211 {
212 for (unsigned i = 0; i < TU_INSTANCE_EXTENSION_COUNT; ++i) {
213 if (strcmp(name, tu_instance_extensions[i].extensionName) == 0)
214 return i;
215 }
216 return -1;
217 }
218
219 VkResult
220 tu_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
221 const VkAllocationCallbacks *pAllocator,
222 VkInstance *pInstance)
223 {
224 struct tu_instance *instance;
225 VkResult result;
226
227 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
228
229 uint32_t client_version;
230 if (pCreateInfo->pApplicationInfo &&
231 pCreateInfo->pApplicationInfo->apiVersion != 0) {
232 client_version = pCreateInfo->pApplicationInfo->apiVersion;
233 } else {
234 tu_EnumerateInstanceVersion(&client_version);
235 }
236
237 instance = vk_zalloc2(&default_alloc, pAllocator, sizeof(*instance), 8,
238 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
239
240 if (!instance)
241 return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
242
243 vk_object_base_init(NULL, &instance->base, VK_OBJECT_TYPE_INSTANCE);
244
245 if (pAllocator)
246 instance->alloc = *pAllocator;
247 else
248 instance->alloc = default_alloc;
249
250 instance->api_version = client_version;
251 instance->physical_device_count = -1;
252
253 instance->debug_flags =
254 parse_debug_string(getenv("TU_DEBUG"), tu_debug_options);
255
256 if (instance->debug_flags & TU_DEBUG_STARTUP)
257 tu_logi("Created an instance");
258
259 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
260 const char *ext_name = pCreateInfo->ppEnabledExtensionNames[i];
261 int index = tu_get_instance_extension_index(ext_name);
262
263 if (index < 0 || !tu_instance_extensions_supported.extensions[index]) {
264 vk_object_base_finish(&instance->base);
265 vk_free2(&default_alloc, pAllocator, instance);
266 return vk_error(instance, VK_ERROR_EXTENSION_NOT_PRESENT);
267 }
268
269 instance->enabled_extensions.extensions[index] = true;
270 }
271
272 result = vk_debug_report_instance_init(&instance->debug_report_callbacks);
273 if (result != VK_SUCCESS) {
274 vk_object_base_finish(&instance->base);
275 vk_free2(&default_alloc, pAllocator, instance);
276 return vk_error(instance, result);
277 }
278
279 glsl_type_singleton_init_or_ref();
280
281 VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
282
283 *pInstance = tu_instance_to_handle(instance);
284
285 return VK_SUCCESS;
286 }
287
288 void
289 tu_DestroyInstance(VkInstance _instance,
290 const VkAllocationCallbacks *pAllocator)
291 {
292 TU_FROM_HANDLE(tu_instance, instance, _instance);
293
294 if (!instance)
295 return;
296
297 for (int i = 0; i < instance->physical_device_count; ++i) {
298 tu_physical_device_finish(instance->physical_devices + i);
299 }
300
301 VG(VALGRIND_DESTROY_MEMPOOL(instance));
302
303 glsl_type_singleton_decref();
304
305 vk_debug_report_instance_destroy(&instance->debug_report_callbacks);
306
307 vk_object_base_finish(&instance->base);
308 vk_free(&instance->alloc, instance);
309 }
310
311 VkResult
312 tu_EnumeratePhysicalDevices(VkInstance _instance,
313 uint32_t *pPhysicalDeviceCount,
314 VkPhysicalDevice *pPhysicalDevices)
315 {
316 TU_FROM_HANDLE(tu_instance, instance, _instance);
317 VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);
318
319 VkResult result;
320
321 if (instance->physical_device_count < 0) {
322 result = tu_enumerate_devices(instance);
323 if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
324 return result;
325 }
326
327 for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
328 vk_outarray_append(&out, p)
329 {
330 *p = tu_physical_device_to_handle(instance->physical_devices + i);
331 }
332 }
333
334 return vk_outarray_status(&out);
335 }
336
337 VkResult
338 tu_EnumeratePhysicalDeviceGroups(
339 VkInstance _instance,
340 uint32_t *pPhysicalDeviceGroupCount,
341 VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
342 {
343 TU_FROM_HANDLE(tu_instance, instance, _instance);
344 VK_OUTARRAY_MAKE(out, pPhysicalDeviceGroupProperties,
345 pPhysicalDeviceGroupCount);
346 VkResult result;
347
348 if (instance->physical_device_count < 0) {
349 result = tu_enumerate_devices(instance);
350 if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
351 return result;
352 }
353
354 for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
355 vk_outarray_append(&out, p)
356 {
357 p->physicalDeviceCount = 1;
358 p->physicalDevices[0] =
359 tu_physical_device_to_handle(instance->physical_devices + i);
360 p->subsetAllocation = false;
361 }
362 }
363
364 return vk_outarray_status(&out);
365 }
366
367 void
368 tu_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
369 VkPhysicalDeviceFeatures *pFeatures)
370 {
371 memset(pFeatures, 0, sizeof(*pFeatures));
372
373 *pFeatures = (VkPhysicalDeviceFeatures) {
374 .robustBufferAccess = true,
375 .fullDrawIndexUint32 = true,
376 .imageCubeArray = true,
377 .independentBlend = true,
378 .geometryShader = true,
379 .tessellationShader = true,
380 .sampleRateShading = true,
381 .dualSrcBlend = true,
382 .logicOp = true,
383 .multiDrawIndirect = true,
384 .drawIndirectFirstInstance = true,
385 .depthClamp = true,
386 .depthBiasClamp = true,
387 .fillModeNonSolid = true,
388 .depthBounds = true,
389 .wideLines = false,
390 .largePoints = true,
391 .alphaToOne = true,
392 .multiViewport = false,
393 .samplerAnisotropy = true,
394 .textureCompressionETC2 = true,
395 .textureCompressionASTC_LDR = true,
396 .textureCompressionBC = true,
397 .occlusionQueryPrecise = true,
398 .pipelineStatisticsQuery = true,
399 .vertexPipelineStoresAndAtomics = true,
400 .fragmentStoresAndAtomics = true,
401 .shaderTessellationAndGeometryPointSize = false,
402 .shaderImageGatherExtended = false,
403 .shaderStorageImageExtendedFormats = false,
404 .shaderStorageImageMultisample = false,
405 .shaderUniformBufferArrayDynamicIndexing = true,
406 .shaderSampledImageArrayDynamicIndexing = true,
407 .shaderStorageBufferArrayDynamicIndexing = true,
408 .shaderStorageImageArrayDynamicIndexing = true,
409 .shaderStorageImageReadWithoutFormat = false,
410 .shaderStorageImageWriteWithoutFormat = false,
411 .shaderClipDistance = false,
412 .shaderCullDistance = false,
413 .shaderFloat64 = false,
414 .shaderInt64 = false,
415 .shaderInt16 = false,
416 .sparseBinding = false,
417 .variableMultisampleRate = false,
418 .inheritedQueries = false,
419 };
420 }
421
422 void
423 tu_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
424 VkPhysicalDeviceFeatures2 *pFeatures)
425 {
426 vk_foreach_struct(ext, pFeatures->pNext)
427 {
428 switch (ext->sType) {
429 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: {
430 VkPhysicalDeviceVulkan11Features *features = (void *) ext;
431 features->storageBuffer16BitAccess = false;
432 features->uniformAndStorageBuffer16BitAccess = false;
433 features->storagePushConstant16 = false;
434 features->storageInputOutput16 = false;
435 features->multiview = true;
436 features->multiviewGeometryShader = false;
437 features->multiviewTessellationShader = false;
438 features->variablePointersStorageBuffer = true;
439 features->variablePointers = true;
440 features->protectedMemory = false;
441 features->samplerYcbcrConversion = true;
442 features->shaderDrawParameters = true;
443 break;
444 }
445 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: {
446 VkPhysicalDeviceVulkan12Features *features = (void *) ext;
447 features->samplerMirrorClampToEdge = true;
448 features->drawIndirectCount = true;
449 features->storageBuffer8BitAccess = false;
450 features->uniformAndStorageBuffer8BitAccess = false;
451 features->storagePushConstant8 = false;
452 features->shaderBufferInt64Atomics = false;
453 features->shaderSharedInt64Atomics = false;
454 features->shaderFloat16 = false;
455 features->shaderInt8 = false;
456
457 features->descriptorIndexing = false;
458 features->shaderInputAttachmentArrayDynamicIndexing = false;
459 features->shaderUniformTexelBufferArrayDynamicIndexing = false;
460 features->shaderStorageTexelBufferArrayDynamicIndexing = false;
461 features->shaderUniformBufferArrayNonUniformIndexing = false;
462 features->shaderSampledImageArrayNonUniformIndexing = false;
463 features->shaderStorageBufferArrayNonUniformIndexing = false;
464 features->shaderStorageImageArrayNonUniformIndexing = false;
465 features->shaderInputAttachmentArrayNonUniformIndexing = false;
466 features->shaderUniformTexelBufferArrayNonUniformIndexing = false;
467 features->shaderStorageTexelBufferArrayNonUniformIndexing = false;
468 features->descriptorBindingUniformBufferUpdateAfterBind = false;
469 features->descriptorBindingSampledImageUpdateAfterBind = false;
470 features->descriptorBindingStorageImageUpdateAfterBind = false;
471 features->descriptorBindingStorageBufferUpdateAfterBind = false;
472 features->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
473 features->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
474 features->descriptorBindingUpdateUnusedWhilePending = false;
475 features->descriptorBindingPartiallyBound = false;
476 features->descriptorBindingVariableDescriptorCount = false;
477 features->runtimeDescriptorArray = false;
478
479 features->samplerFilterMinmax = true;
480 features->scalarBlockLayout = false;
481 features->imagelessFramebuffer = false;
482 features->uniformBufferStandardLayout = false;
483 features->shaderSubgroupExtendedTypes = false;
484 features->separateDepthStencilLayouts = false;
485 features->hostQueryReset = true;
486 features->timelineSemaphore = false;
487 features->bufferDeviceAddress = false;
488 features->bufferDeviceAddressCaptureReplay = false;
489 features->bufferDeviceAddressMultiDevice = false;
490 features->vulkanMemoryModel = false;
491 features->vulkanMemoryModelDeviceScope = false;
492 features->vulkanMemoryModelAvailabilityVisibilityChains = false;
493 features->shaderOutputViewportIndex = false;
494 features->shaderOutputLayer = false;
495 features->subgroupBroadcastDynamicId = false;
496 break;
497 }
498 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
499 VkPhysicalDeviceVariablePointersFeatures *features = (void *) ext;
500 features->variablePointersStorageBuffer = true;
501 features->variablePointers = true;
502 break;
503 }
504 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
505 VkPhysicalDeviceMultiviewFeatures *features =
506 (VkPhysicalDeviceMultiviewFeatures *) ext;
507 features->multiview = true;
508 features->multiviewGeometryShader = false;
509 features->multiviewTessellationShader = false;
510 break;
511 }
512 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
513 VkPhysicalDeviceShaderDrawParametersFeatures *features =
514 (VkPhysicalDeviceShaderDrawParametersFeatures *) ext;
515 features->shaderDrawParameters = true;
516 break;
517 }
518 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
519 VkPhysicalDeviceProtectedMemoryFeatures *features =
520 (VkPhysicalDeviceProtectedMemoryFeatures *) ext;
521 features->protectedMemory = false;
522 break;
523 }
524 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
525 VkPhysicalDevice16BitStorageFeatures *features =
526 (VkPhysicalDevice16BitStorageFeatures *) ext;
527 features->storageBuffer16BitAccess = false;
528 features->uniformAndStorageBuffer16BitAccess = false;
529 features->storagePushConstant16 = false;
530 features->storageInputOutput16 = false;
531 break;
532 }
533 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
534 VkPhysicalDeviceSamplerYcbcrConversionFeatures *features =
535 (VkPhysicalDeviceSamplerYcbcrConversionFeatures *) ext;
536 features->samplerYcbcrConversion = true;
537 break;
538 }
539 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: {
540 VkPhysicalDeviceDescriptorIndexingFeaturesEXT *features =
541 (VkPhysicalDeviceDescriptorIndexingFeaturesEXT *) ext;
542 features->shaderInputAttachmentArrayDynamicIndexing = false;
543 features->shaderUniformTexelBufferArrayDynamicIndexing = false;
544 features->shaderStorageTexelBufferArrayDynamicIndexing = false;
545 features->shaderUniformBufferArrayNonUniformIndexing = false;
546 features->shaderSampledImageArrayNonUniformIndexing = false;
547 features->shaderStorageBufferArrayNonUniformIndexing = false;
548 features->shaderStorageImageArrayNonUniformIndexing = false;
549 features->shaderInputAttachmentArrayNonUniformIndexing = false;
550 features->shaderUniformTexelBufferArrayNonUniformIndexing = false;
551 features->shaderStorageTexelBufferArrayNonUniformIndexing = false;
552 features->descriptorBindingUniformBufferUpdateAfterBind = false;
553 features->descriptorBindingSampledImageUpdateAfterBind = false;
554 features->descriptorBindingStorageImageUpdateAfterBind = false;
555 features->descriptorBindingStorageBufferUpdateAfterBind = false;
556 features->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
557 features->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
558 features->descriptorBindingUpdateUnusedWhilePending = false;
559 features->descriptorBindingPartiallyBound = false;
560 features->descriptorBindingVariableDescriptorCount = false;
561 features->runtimeDescriptorArray = false;
562 break;
563 }
564 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
565 VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
566 (VkPhysicalDeviceConditionalRenderingFeaturesEXT *) ext;
567 features->conditionalRendering = true;
568 features->inheritedConditionalRendering = true;
569 break;
570 }
571 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
572 VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
573 (VkPhysicalDeviceTransformFeedbackFeaturesEXT *) ext;
574 features->transformFeedback = true;
575 features->geometryStreams = false;
576 break;
577 }
578 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
579 VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
580 (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
581 features->indexTypeUint8 = true;
582 break;
583 }
584 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
585 VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
586 (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
587 features->vertexAttributeInstanceRateDivisor = true;
588 features->vertexAttributeInstanceRateZeroDivisor = true;
589 break;
590 }
591 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
592 VkPhysicalDevicePrivateDataFeaturesEXT *features =
593 (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
594 features->privateData = true;
595 break;
596 }
597 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
598 VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
599 (VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
600 features->depthClipEnable = true;
601 break;
602 }
603 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
604 VkPhysicalDevice4444FormatsFeaturesEXT *features = (void *)ext;
605 features->formatA4R4G4B4 = true;
606 features->formatA4B4G4R4 = true;
607 break;
608 }
609 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
610 VkPhysicalDeviceCustomBorderColorFeaturesEXT *features = (void *) ext;
611 features->customBorderColors = true;
612 features->customBorderColorWithoutFormat = true;
613 break;
614 }
615 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT: {
616 VkPhysicalDeviceHostQueryResetFeaturesEXT *features =
617 (VkPhysicalDeviceHostQueryResetFeaturesEXT *)ext;
618 features->hostQueryReset = true;
619 break;
620 }
621 default:
622 break;
623 }
624 }
625 return tu_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
626 }
627
628 void
629 tu_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
630 VkPhysicalDeviceProperties *pProperties)
631 {
632 TU_FROM_HANDLE(tu_physical_device, pdevice, physicalDevice);
633 VkSampleCountFlags sample_counts =
634 VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT;
635
636 /* I have no idea what the maximum size is, but the hardware supports very
637 * large numbers of descriptors (at least 2^16). This limit is based on
638 * CP_LOAD_STATE6, which has a 28-bit field for the DWORD offset, so that
639 * we don't have to think about what to do if that overflows, but really
640 * nothing is likely to get close to this.
641 */
642 const size_t max_descriptor_set_size = (1 << 28) / A6XX_TEX_CONST_DWORDS;
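   /* With A6XX_TEX_CONST_DWORDS == 16 this comes to 2^24, i.e. roughly
    * 16.7 million descriptors per set.
    */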
643
644 VkPhysicalDeviceLimits limits = {
645 .maxImageDimension1D = (1 << 14),
646 .maxImageDimension2D = (1 << 14),
647 .maxImageDimension3D = (1 << 11),
648 .maxImageDimensionCube = (1 << 14),
649 .maxImageArrayLayers = (1 << 11),
650 .maxTexelBufferElements = 128 * 1024 * 1024,
651 .maxUniformBufferRange = MAX_UNIFORM_BUFFER_RANGE,
652 .maxStorageBufferRange = MAX_STORAGE_BUFFER_RANGE,
653 .maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
654 .maxMemoryAllocationCount = UINT32_MAX,
655 .maxSamplerAllocationCount = 64 * 1024,
656 .bufferImageGranularity = 64, /* A cache line */
657 .sparseAddressSpaceSize = 0xffffffffu, /* buffer max size */
658 .maxBoundDescriptorSets = MAX_SETS,
659 .maxPerStageDescriptorSamplers = max_descriptor_set_size,
660 .maxPerStageDescriptorUniformBuffers = max_descriptor_set_size,
661 .maxPerStageDescriptorStorageBuffers = max_descriptor_set_size,
662 .maxPerStageDescriptorSampledImages = max_descriptor_set_size,
663 .maxPerStageDescriptorStorageImages = max_descriptor_set_size,
664 .maxPerStageDescriptorInputAttachments = MAX_RTS,
665 .maxPerStageResources = max_descriptor_set_size,
666 .maxDescriptorSetSamplers = max_descriptor_set_size,
667 .maxDescriptorSetUniformBuffers = max_descriptor_set_size,
668 .maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
669 .maxDescriptorSetStorageBuffers = max_descriptor_set_size,
670 .maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
671 .maxDescriptorSetSampledImages = max_descriptor_set_size,
672 .maxDescriptorSetStorageImages = max_descriptor_set_size,
673 .maxDescriptorSetInputAttachments = MAX_RTS,
674 .maxVertexInputAttributes = 32,
675 .maxVertexInputBindings = 32,
676 .maxVertexInputAttributeOffset = 4095,
677 .maxVertexInputBindingStride = 2048,
678 .maxVertexOutputComponents = 128,
679 .maxTessellationGenerationLevel = 64,
680 .maxTessellationPatchSize = 32,
681 .maxTessellationControlPerVertexInputComponents = 128,
682 .maxTessellationControlPerVertexOutputComponents = 128,
683 .maxTessellationControlPerPatchOutputComponents = 120,
684 .maxTessellationControlTotalOutputComponents = 4096,
685 .maxTessellationEvaluationInputComponents = 128,
686 .maxTessellationEvaluationOutputComponents = 128,
687 .maxGeometryShaderInvocations = 32,
688 .maxGeometryInputComponents = 64,
689 .maxGeometryOutputComponents = 128,
690 .maxGeometryOutputVertices = 256,
691 .maxGeometryTotalOutputComponents = 1024,
692 .maxFragmentInputComponents = 124,
693 .maxFragmentOutputAttachments = 8,
694 .maxFragmentDualSrcAttachments = 1,
695 .maxFragmentCombinedOutputResources = 8,
696 .maxComputeSharedMemorySize = 32768,
697 .maxComputeWorkGroupCount = { 65535, 65535, 65535 },
698 .maxComputeWorkGroupInvocations = 2048,
699 .maxComputeWorkGroupSize = { 2048, 2048, 2048 },
700 .subPixelPrecisionBits = 8,
701 .subTexelPrecisionBits = 8,
702 .mipmapPrecisionBits = 8,
703 .maxDrawIndexedIndexValue = UINT32_MAX,
704 .maxDrawIndirectCount = UINT32_MAX,
705 .maxSamplerLodBias = 4095.0 / 256.0, /* [-16, 15.99609375] */
706 .maxSamplerAnisotropy = 16,
707 .maxViewports = MAX_VIEWPORTS,
708 .maxViewportDimensions = { (1 << 14), (1 << 14) },
709 .viewportBoundsRange = { INT16_MIN, INT16_MAX },
710 .viewportSubPixelBits = 8,
711 .minMemoryMapAlignment = 4096, /* A page */
712 .minTexelBufferOffsetAlignment = 64,
713 .minUniformBufferOffsetAlignment = 64,
714 .minStorageBufferOffsetAlignment = 64,
715 .minTexelOffset = -16,
716 .maxTexelOffset = 15,
717 .minTexelGatherOffset = -32,
718 .maxTexelGatherOffset = 31,
719 .minInterpolationOffset = -0.5,
720 .maxInterpolationOffset = 0.4375,
721 .subPixelInterpolationOffsetBits = 4,
722 .maxFramebufferWidth = (1 << 14),
723 .maxFramebufferHeight = (1 << 14),
724 .maxFramebufferLayers = (1 << 10),
725 .framebufferColorSampleCounts = sample_counts,
726 .framebufferDepthSampleCounts = sample_counts,
727 .framebufferStencilSampleCounts = sample_counts,
728 .framebufferNoAttachmentsSampleCounts = sample_counts,
729 .maxColorAttachments = MAX_RTS,
730 .sampledImageColorSampleCounts = sample_counts,
731 .sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT,
732 .sampledImageDepthSampleCounts = sample_counts,
733 .sampledImageStencilSampleCounts = sample_counts,
734 .storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
735 .maxSampleMaskWords = 1,
736 .timestampComputeAndGraphics = true,
737 .timestampPeriod = 1000000000.0 / 19200000.0, /* CP_ALWAYS_ON_COUNTER is fixed 19.2MHz */
738 .maxClipDistances = 8,
739 .maxCullDistances = 8,
740 .maxCombinedClipAndCullDistances = 8,
741 .discreteQueuePriorities = 1,
742 .pointSizeRange = { 1, 4092 },
743 .lineWidthRange = { 0.0, 7.9921875 },
744 .pointSizeGranularity = 0.0625,
745 .lineWidthGranularity = (1.0 / 128.0),
746 .strictLines = false, /* FINISHME */
747 .standardSampleLocations = true,
748 .optimalBufferCopyOffsetAlignment = 128,
749 .optimalBufferCopyRowPitchAlignment = 128,
750 .nonCoherentAtomSize = 64,
751 };
752
753 *pProperties = (VkPhysicalDeviceProperties) {
754 .apiVersion = tu_physical_device_api_version(pdevice),
755 .driverVersion = vk_get_driver_version(),
756 .vendorID = 0, /* TODO */
757 .deviceID = 0,
758 .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
759 .limits = limits,
760 .sparseProperties = { 0 },
761 };
762
763 strcpy(pProperties->deviceName, pdevice->name);
764 memcpy(pProperties->pipelineCacheUUID, pdevice->cache_uuid, VK_UUID_SIZE);
765 }
766
767 void
768 tu_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
769 VkPhysicalDeviceProperties2 *pProperties)
770 {
771 TU_FROM_HANDLE(tu_physical_device, pdevice, physicalDevice);
772 tu_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
773
774 vk_foreach_struct(ext, pProperties->pNext)
775 {
776 switch (ext->sType) {
777 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
778 VkPhysicalDevicePushDescriptorPropertiesKHR *properties =
779 (VkPhysicalDevicePushDescriptorPropertiesKHR *) ext;
780 properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
781 break;
782 }
783 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
784 VkPhysicalDeviceIDProperties *properties =
785 (VkPhysicalDeviceIDProperties *) ext;
786 memcpy(properties->driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
787 memcpy(properties->deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
788 properties->deviceLUIDValid = false;
789 break;
790 }
791 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
792 VkPhysicalDeviceMultiviewProperties *properties =
793 (VkPhysicalDeviceMultiviewProperties *) ext;
794 properties->maxMultiviewViewCount = MAX_VIEWS;
795 properties->maxMultiviewInstanceIndex = INT_MAX;
796 break;
797 }
798 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
799 VkPhysicalDevicePointClippingProperties *properties =
800 (VkPhysicalDevicePointClippingProperties *) ext;
801 properties->pointClippingBehavior =
802 VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
803 break;
804 }
805 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
806 VkPhysicalDeviceMaintenance3Properties *properties =
807 (VkPhysicalDeviceMaintenance3Properties *) ext;
808          /* Make sure everything is addressable by a signed 32-bit int, given
809           * that our largest descriptors are 96 bytes. */
810 properties->maxPerSetDescriptors = (1ull << 31) / 96;
811 /* Our buffer size fields allow only this much */
812 properties->maxMemoryAllocationSize = 0xFFFFFFFFull;
813 break;
814 }
815 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: {
816 VkPhysicalDeviceTransformFeedbackPropertiesEXT *properties =
817 (VkPhysicalDeviceTransformFeedbackPropertiesEXT *)ext;
818
819 properties->maxTransformFeedbackStreams = IR3_MAX_SO_STREAMS;
820 properties->maxTransformFeedbackBuffers = IR3_MAX_SO_BUFFERS;
821 properties->maxTransformFeedbackBufferSize = UINT32_MAX;
822 properties->maxTransformFeedbackStreamDataSize = 512;
823 properties->maxTransformFeedbackBufferDataSize = 512;
824 properties->maxTransformFeedbackBufferDataStride = 512;
825 properties->transformFeedbackQueries = true;
826 properties->transformFeedbackStreamsLinesTriangles = false;
827 properties->transformFeedbackRasterizationStreamSelect = false;
828 properties->transformFeedbackDraw = true;
829 break;
830 }
831 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: {
832 VkPhysicalDeviceSampleLocationsPropertiesEXT *properties =
833 (VkPhysicalDeviceSampleLocationsPropertiesEXT *)ext;
834 properties->sampleLocationSampleCounts = 0;
835 if (pdevice->supported_extensions.EXT_sample_locations) {
836 properties->sampleLocationSampleCounts =
837 VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_2_BIT | VK_SAMPLE_COUNT_4_BIT;
838 }
839 properties->maxSampleLocationGridSize = (VkExtent2D) { 1 , 1 };
840 properties->sampleLocationCoordinateRange[0] = 0.0f;
841 properties->sampleLocationCoordinateRange[1] = 0.9375f;
842 properties->sampleLocationSubPixelBits = 4;
843 properties->variableSampleLocations = true;
844 break;
845 }
846 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: {
847 VkPhysicalDeviceSamplerFilterMinmaxProperties *properties =
848 (VkPhysicalDeviceSamplerFilterMinmaxProperties *)ext;
849 properties->filterMinmaxImageComponentMapping = true;
850 properties->filterMinmaxSingleComponentFormats = true;
851 break;
852 }
853 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
854 VkPhysicalDeviceSubgroupProperties *properties =
855 (VkPhysicalDeviceSubgroupProperties *)ext;
856 properties->subgroupSize = 64;
857 properties->supportedStages = VK_SHADER_STAGE_COMPUTE_BIT;
858 properties->supportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT |
859 VK_SUBGROUP_FEATURE_VOTE_BIT;
860 properties->quadOperationsInAllStages = false;
861 break;
862 }
863 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
864 VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *props =
865 (VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
866 props->maxVertexAttribDivisor = UINT32_MAX;
867 break;
868 }
869 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: {
870 VkPhysicalDeviceCustomBorderColorPropertiesEXT *props = (void *)ext;
871 props->maxCustomBorderColorSamplers = TU_BORDER_COLOR_COUNT;
872 break;
873 }
874 default:
875 break;
876 }
877 }
878 }
879
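/* A single queue family: one hardware queue handling graphics, compute,
 * and transfer work.
 */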
880 static const VkQueueFamilyProperties tu_queue_family_properties = {
881 .queueFlags =
882 VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
883 .queueCount = 1,
884 .timestampValidBits = 48,
885 .minImageTransferGranularity = { 1, 1, 1 },
886 };
887
888 void
889 tu_GetPhysicalDeviceQueueFamilyProperties(
890 VkPhysicalDevice physicalDevice,
891 uint32_t *pQueueFamilyPropertyCount,
892 VkQueueFamilyProperties *pQueueFamilyProperties)
893 {
894 VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
895
896 vk_outarray_append(&out, p) { *p = tu_queue_family_properties; }
897 }
898
899 void
900 tu_GetPhysicalDeviceQueueFamilyProperties2(
901 VkPhysicalDevice physicalDevice,
902 uint32_t *pQueueFamilyPropertyCount,
903 VkQueueFamilyProperties2 *pQueueFamilyProperties)
904 {
905 VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
906
907 vk_outarray_append(&out, p)
908 {
909 p->queueFamilyProperties = tu_queue_family_properties;
910 }
911 }
912
913 static uint64_t
914 tu_get_system_heap_size(void)
915 {
916 struct sysinfo info;
917 sysinfo(&info);
918
919 uint64_t total_ram = (uint64_t) info.totalram * (uint64_t) info.mem_unit;
920
921 /* We don't want to burn too much ram with the GPU. If the user has 4GiB
922 * or less, we use at most half. If they have more than 4GiB, we use 3/4.
923 */
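   /* For example: 16 GiB of RAM exposes a 12 GiB heap, 4 GiB exposes 2 GiB. */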
924 uint64_t available_ram;
925 if (total_ram <= 4ull * 1024ull * 1024ull * 1024ull)
926 available_ram = total_ram / 2;
927 else
928 available_ram = total_ram * 3 / 4;
929
930 return available_ram;
931 }
932
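/* Adreno shares system RAM with the CPU, so a single heap with one
 * device-local, host-visible, host-coherent memory type covers everything.
 */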
933 void
934 tu_GetPhysicalDeviceMemoryProperties(
935 VkPhysicalDevice physicalDevice,
936 VkPhysicalDeviceMemoryProperties *pMemoryProperties)
937 {
938 pMemoryProperties->memoryHeapCount = 1;
939 pMemoryProperties->memoryHeaps[0].size = tu_get_system_heap_size();
940 pMemoryProperties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
941
942 pMemoryProperties->memoryTypeCount = 1;
943 pMemoryProperties->memoryTypes[0].propertyFlags =
944 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
945 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
946 VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
947 pMemoryProperties->memoryTypes[0].heapIndex = 0;
948 }
949
950 void
951 tu_GetPhysicalDeviceMemoryProperties2(
952 VkPhysicalDevice physicalDevice,
953 VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
954 {
955 return tu_GetPhysicalDeviceMemoryProperties(
956 physicalDevice, &pMemoryProperties->memoryProperties);
957 }
958
959 static VkResult
960 tu_queue_init(struct tu_device *device,
961 struct tu_queue *queue,
962 uint32_t queue_family_index,
963 int idx,
964 VkDeviceQueueCreateFlags flags)
965 {
966 vk_object_base_init(&device->vk, &queue->base, VK_OBJECT_TYPE_QUEUE);
967
968 queue->device = device;
969 queue->queue_family_index = queue_family_index;
970 queue->queue_idx = idx;
971 queue->flags = flags;
972
973 int ret = tu_drm_submitqueue_new(device, 0, &queue->msm_queue_id);
974 if (ret)
975 return VK_ERROR_INITIALIZATION_FAILED;
976
977 tu_fence_init(&queue->submit_fence, false);
978
979 return VK_SUCCESS;
980 }
981
982 static void
983 tu_queue_finish(struct tu_queue *queue)
984 {
985 tu_fence_finish(&queue->submit_fence);
986 tu_drm_submitqueue_close(queue->device, queue->msm_queue_id);
987 }
988
989 static int
990 tu_get_device_extension_index(const char *name)
991 {
992 for (unsigned i = 0; i < TU_DEVICE_EXTENSION_COUNT; ++i) {
993 if (strcmp(name, tu_device_extensions[i].extensionName) == 0)
994 return i;
995 }
996 return -1;
997 }
998
999 VkResult
1000 tu_CreateDevice(VkPhysicalDevice physicalDevice,
1001 const VkDeviceCreateInfo *pCreateInfo,
1002 const VkAllocationCallbacks *pAllocator,
1003 VkDevice *pDevice)
1004 {
1005 TU_FROM_HANDLE(tu_physical_device, physical_device, physicalDevice);
1006 VkResult result;
1007 struct tu_device *device;
1008 bool custom_border_colors = false;
1009
1010 /* Check enabled features */
1011 if (pCreateInfo->pEnabledFeatures) {
1012 VkPhysicalDeviceFeatures supported_features;
1013 tu_GetPhysicalDeviceFeatures(physicalDevice, &supported_features);
1014 VkBool32 *supported_feature = (VkBool32 *) &supported_features;
1015 VkBool32 *enabled_feature = (VkBool32 *) pCreateInfo->pEnabledFeatures;
1016 unsigned num_features =
1017 sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
1018 for (uint32_t i = 0; i < num_features; i++) {
1019 if (enabled_feature[i] && !supported_feature[i])
1020 return vk_error(physical_device->instance,
1021 VK_ERROR_FEATURE_NOT_PRESENT);
1022 }
1023 }
1024
1025 vk_foreach_struct_const(ext, pCreateInfo->pNext) {
1026 switch (ext->sType) {
1027 case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
1028 const VkPhysicalDeviceCustomBorderColorFeaturesEXT *border_color_features = (const void *)ext;
1029 custom_border_colors = border_color_features->customBorderColors;
1030 break;
1031 }
1032 default:
1033 break;
1034 }
1035 }
1036
1037 device = vk_zalloc2(&physical_device->instance->alloc, pAllocator,
1038 sizeof(*device), 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1039 if (!device)
1040 return vk_error(physical_device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1041
1042 vk_device_init(&device->vk, pCreateInfo,
1043 &physical_device->instance->alloc, pAllocator);
1044
1045 device->instance = physical_device->instance;
1046 device->physical_device = physical_device;
1047 device->_lost = false;
1048
1049 for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
1050 const char *ext_name = pCreateInfo->ppEnabledExtensionNames[i];
1051 int index = tu_get_device_extension_index(ext_name);
1052 if (index < 0 ||
1053 !physical_device->supported_extensions.extensions[index]) {
1054 vk_free(&device->vk.alloc, device);
1055 return vk_error(physical_device->instance,
1056 VK_ERROR_EXTENSION_NOT_PRESENT);
1057 }
1058
1059 device->enabled_extensions.extensions[index] = true;
1060 }
1061
1062 for (unsigned i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
1063 const VkDeviceQueueCreateInfo *queue_create =
1064 &pCreateInfo->pQueueCreateInfos[i];
1065 uint32_t qfi = queue_create->queueFamilyIndex;
1066 device->queues[qfi] = vk_alloc(
1067 &device->vk.alloc, queue_create->queueCount * sizeof(struct tu_queue),
1068 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1069 if (!device->queues[qfi]) {
1070 result = VK_ERROR_OUT_OF_HOST_MEMORY;
1071 goto fail_queues;
1072 }
1073
1074 memset(device->queues[qfi], 0,
1075 queue_create->queueCount * sizeof(struct tu_queue));
1076
1077 device->queue_count[qfi] = queue_create->queueCount;
1078
1079 for (unsigned q = 0; q < queue_create->queueCount; q++) {
1080 result = tu_queue_init(device, &device->queues[qfi][q], qfi, q,
1081 queue_create->flags);
1082 if (result != VK_SUCCESS)
1083 goto fail_queues;
1084 }
1085 }
1086
1087 device->compiler = ir3_compiler_create(NULL, physical_device->gpu_id);
1088    if (!device->compiler) {
1089       result = VK_ERROR_INITIALIZATION_FAILED;
         goto fail_queues;
      }
1090
1091 /* initial sizes, these will increase if there is overflow */
1092 device->vsc_draw_strm_pitch = 0x1000 + VSC_PAD;
1093 device->vsc_prim_strm_pitch = 0x4000 + VSC_PAD;
1094
1095 uint32_t global_size = sizeof(struct tu6_global);
1096 if (custom_border_colors)
1097 global_size += TU_BORDER_COLOR_COUNT * sizeof(struct bcolor_entry);
1098
1099 result = tu_bo_init_new(device, &device->global_bo, global_size);
1100 if (result != VK_SUCCESS)
1101 goto fail_global_bo;
1102
1103 result = tu_bo_map(device, &device->global_bo);
1104 if (result != VK_SUCCESS)
1105 goto fail_global_bo_map;
1106
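   /* Pre-pack the six builtin VkBorderColor values into the global BO so
    * samplers can refer to them without any per-sampler setup.
    */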
1107 struct tu6_global *global = device->global_bo.map;
1108 tu_init_clear_blit_shaders(device->global_bo.map);
1109 global->predicate = 0;
1110 tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK],
1111 &(VkClearColorValue) {}, false);
1112 tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_INT_TRANSPARENT_BLACK],
1113 &(VkClearColorValue) {}, true);
1114 tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK],
1115 &(VkClearColorValue) { .float32[3] = 1.0f }, false);
1116 tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_INT_OPAQUE_BLACK],
1117 &(VkClearColorValue) { .int32[3] = 1 }, true);
1118 tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE],
1119 &(VkClearColorValue) { .float32[0 ... 3] = 1.0f }, false);
1120 tu6_pack_border_color(&global->bcolor_builtin[VK_BORDER_COLOR_INT_OPAQUE_WHITE],
1121 &(VkClearColorValue) { .int32[0 ... 3] = 1 }, true);
1122
1123 /* initialize to ones so ffs can be used to find unused slots */
1124 BITSET_ONES(device->custom_border_color);
1125
1126 VkPipelineCacheCreateInfo ci;
1127 ci.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1128 ci.pNext = NULL;
1129 ci.flags = 0;
1130 ci.pInitialData = NULL;
1131 ci.initialDataSize = 0;
1132 VkPipelineCache pc;
1133 result =
1134 tu_CreatePipelineCache(tu_device_to_handle(device), &ci, NULL, &pc);
1135 if (result != VK_SUCCESS)
1136 goto fail_pipeline_cache;
1137
1138 device->mem_cache = tu_pipeline_cache_from_handle(pc);
1139
1140 for (unsigned i = 0; i < ARRAY_SIZE(device->scratch_bos); i++)
1141 mtx_init(&device->scratch_bos[i].construct_mtx, mtx_plain);
1142
1143 mtx_init(&device->mutex, mtx_plain);
1144
1145 *pDevice = tu_device_to_handle(device);
1146 return VK_SUCCESS;
1147
1148 fail_pipeline_cache:
1149 fail_global_bo_map:
1150 tu_bo_finish(device, &device->global_bo);
1151
1152 fail_global_bo:
1153 ralloc_free(device->compiler);
1154
1155 fail_queues:
1156 for (unsigned i = 0; i < TU_MAX_QUEUE_FAMILIES; i++) {
1157 for (unsigned q = 0; q < device->queue_count[i]; q++)
1158 tu_queue_finish(&device->queues[i][q]);
1159 if (device->queue_count[i])
1160 vk_object_free(&device->vk, NULL, device->queues[i]);
1161 }
1162
1163 vk_free(&device->vk.alloc, device);
1164 return result;
1165 }
1166
1167 void
1168 tu_DestroyDevice(VkDevice _device, const VkAllocationCallbacks *pAllocator)
1169 {
1170 TU_FROM_HANDLE(tu_device, device, _device);
1171
1172 if (!device)
1173 return;
1174
1175 for (unsigned i = 0; i < TU_MAX_QUEUE_FAMILIES; i++) {
1176 for (unsigned q = 0; q < device->queue_count[i]; q++)
1177 tu_queue_finish(&device->queues[i][q]);
1178 if (device->queue_count[i])
1179 vk_object_free(&device->vk, NULL, device->queues[i]);
1180 }
1181
1182 for (unsigned i = 0; i < ARRAY_SIZE(device->scratch_bos); i++) {
1183 if (device->scratch_bos[i].initialized)
1184 tu_bo_finish(device, &device->scratch_bos[i].bo);
1185 }
1186
1187 ir3_compiler_destroy(device->compiler);
1188
1189 VkPipelineCache pc = tu_pipeline_cache_to_handle(device->mem_cache);
1190 tu_DestroyPipelineCache(tu_device_to_handle(device), pc, NULL);
1191
1192 vk_free(&device->vk.alloc, device);
1193 }
1194
1195 VkResult
1196 _tu_device_set_lost(struct tu_device *device,
1197 const char *file, int line,
1198 const char *msg, ...)
1199 {
1200 /* Set the flag indicating that waits should return in finite time even
1201 * after device loss.
1202 */
1203 p_atomic_inc(&device->_lost);
1204
1205 /* TODO: Report the log message through VkDebugReportCallbackEXT instead */
1206 fprintf(stderr, "%s:%d: ", file, line);
1207 va_list ap;
1208 va_start(ap, msg);
1209 vfprintf(stderr, msg, ap);
1210 va_end(ap);
1211
1212 if (env_var_as_boolean("TU_ABORT_ON_DEVICE_LOSS", false))
1213 abort();
1214
1215 return VK_ERROR_DEVICE_LOST;
1216 }
1217
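/* Scratch BOs are kept in power-of-two buckets starting at
 * MIN_SCRATCH_BO_SIZE_LOG2. Any already-initialized bucket large enough for
 * the request is reused; otherwise the exact bucket is allocated under its
 * per-bucket mutex, with a re-check after taking the lock (double-checked
 * locking).
 */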
1218 VkResult
1219 tu_get_scratch_bo(struct tu_device *dev, uint64_t size, struct tu_bo **bo)
1220 {
1221 unsigned size_log2 = MAX2(util_logbase2_ceil64(size), MIN_SCRATCH_BO_SIZE_LOG2);
1222 unsigned index = size_log2 - MIN_SCRATCH_BO_SIZE_LOG2;
1223 assert(index < ARRAY_SIZE(dev->scratch_bos));
1224
1225 for (unsigned i = index; i < ARRAY_SIZE(dev->scratch_bos); i++) {
1226 if (p_atomic_read(&dev->scratch_bos[i].initialized)) {
1227 /* Fast path: just return the already-allocated BO. */
1228 *bo = &dev->scratch_bos[i].bo;
1229 return VK_SUCCESS;
1230 }
1231 }
1232
1233    /* Slow path: actually allocate the BO. Take the per-bucket lock so that
1234     * concurrent callers serialize here and only one of them actually
1235     * allocates the BO for this bucket.
1236     */
1237 mtx_lock(&dev->scratch_bos[index].construct_mtx);
1238
1239 /* Another thread may have allocated it already while we were waiting on
1240 * the lock. We need to check this in order to avoid double-allocating.
1241 */
1242 if (dev->scratch_bos[index].initialized) {
1243 mtx_unlock(&dev->scratch_bos[index].construct_mtx);
1244 *bo = &dev->scratch_bos[index].bo;
1245 return VK_SUCCESS;
1246 }
1247
1248 unsigned bo_size = 1ull << size_log2;
1249 VkResult result = tu_bo_init_new(dev, &dev->scratch_bos[index].bo, bo_size);
1250 if (result != VK_SUCCESS) {
1251 mtx_unlock(&dev->scratch_bos[index].construct_mtx);
1252 return result;
1253 }
1254
1255 p_atomic_set(&dev->scratch_bos[index].initialized, true);
1256
1257 mtx_unlock(&dev->scratch_bos[index].construct_mtx);
1258
1259 *bo = &dev->scratch_bos[index].bo;
1260 return VK_SUCCESS;
1261 }
1262
1263 VkResult
1264 tu_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
1265 VkLayerProperties *pProperties)
1266 {
1267 *pPropertyCount = 0;
1268 return VK_SUCCESS;
1269 }
1270
1271 VkResult
1272 tu_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
1273 uint32_t *pPropertyCount,
1274 VkLayerProperties *pProperties)
1275 {
1276 *pPropertyCount = 0;
1277 return VK_SUCCESS;
1278 }
1279
1280 void
1281 tu_GetDeviceQueue2(VkDevice _device,
1282 const VkDeviceQueueInfo2 *pQueueInfo,
1283 VkQueue *pQueue)
1284 {
1285 TU_FROM_HANDLE(tu_device, device, _device);
1286 struct tu_queue *queue;
1287
1288 queue =
1289 &device->queues[pQueueInfo->queueFamilyIndex][pQueueInfo->queueIndex];
1290 if (pQueueInfo->flags != queue->flags) {
1291 /* From the Vulkan 1.1.70 spec:
1292 *
1293 * "The queue returned by vkGetDeviceQueue2 must have the same
1294 * flags value from this structure as that used at device
1295 * creation time in a VkDeviceQueueCreateInfo instance. If no
1296 * matching flags were specified at device creation time then
1297 * pQueue will return VK_NULL_HANDLE."
1298 */
1299 *pQueue = VK_NULL_HANDLE;
1300 return;
1301 }
1302
1303 *pQueue = tu_queue_to_handle(queue);
1304 }
1305
1306 void
1307 tu_GetDeviceQueue(VkDevice _device,
1308 uint32_t queueFamilyIndex,
1309 uint32_t queueIndex,
1310 VkQueue *pQueue)
1311 {
1312 const VkDeviceQueueInfo2 info =
1313 (VkDeviceQueueInfo2) { .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
1314 .queueFamilyIndex = queueFamilyIndex,
1315 .queueIndex = queueIndex };
1316
1317 tu_GetDeviceQueue2(_device, &info, pQueue);
1318 }
1319
1320 VkResult
1321 tu_QueueWaitIdle(VkQueue _queue)
1322 {
1323 TU_FROM_HANDLE(tu_queue, queue, _queue);
1324
1325 if (tu_device_is_lost(queue->device))
1326 return VK_ERROR_DEVICE_LOST;
1327
1328 tu_fence_wait_idle(&queue->submit_fence);
1329
1330 return VK_SUCCESS;
1331 }
1332
1333 VkResult
1334 tu_DeviceWaitIdle(VkDevice _device)
1335 {
1336 TU_FROM_HANDLE(tu_device, device, _device);
1337
1338 if (tu_device_is_lost(device))
1339 return VK_ERROR_DEVICE_LOST;
1340
1341 for (unsigned i = 0; i < TU_MAX_QUEUE_FAMILIES; i++) {
1342 for (unsigned q = 0; q < device->queue_count[i]; q++) {
1343 tu_QueueWaitIdle(tu_queue_to_handle(&device->queues[i][q]));
1344 }
1345 }
1346 return VK_SUCCESS;
1347 }
1348
1349 VkResult
1350 tu_EnumerateInstanceExtensionProperties(const char *pLayerName,
1351 uint32_t *pPropertyCount,
1352 VkExtensionProperties *pProperties)
1353 {
1354 VK_OUTARRAY_MAKE(out, pProperties, pPropertyCount);
1355
1356    /* We support no layers */
1357 if (pLayerName)
1358 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1359
1360 for (int i = 0; i < TU_INSTANCE_EXTENSION_COUNT; i++) {
1361 if (tu_instance_extensions_supported.extensions[i]) {
1362 vk_outarray_append(&out, prop) { *prop = tu_instance_extensions[i]; }
1363 }
1364 }
1365
1366 return vk_outarray_status(&out);
1367 }
1368
1369 VkResult
1370 tu_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
1371 const char *pLayerName,
1372 uint32_t *pPropertyCount,
1373 VkExtensionProperties *pProperties)
1374 {
1376 TU_FROM_HANDLE(tu_physical_device, device, physicalDevice);
1377 VK_OUTARRAY_MAKE(out, pProperties, pPropertyCount);
1378
1379    /* We support no layers */
1380 if (pLayerName)
1381 return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1382
1383 for (int i = 0; i < TU_DEVICE_EXTENSION_COUNT; i++) {
1384 if (device->supported_extensions.extensions[i]) {
1385 vk_outarray_append(&out, prop) { *prop = tu_device_extensions[i]; }
1386 }
1387 }
1388
1389 return vk_outarray_status(&out);
1390 }
1391
1392 PFN_vkVoidFunction
1393 tu_GetInstanceProcAddr(VkInstance _instance, const char *pName)
1394 {
1395 TU_FROM_HANDLE(tu_instance, instance, _instance);
1396
1397 return tu_lookup_entrypoint_checked(
1398 pName, instance ? instance->api_version : 0,
1399 instance ? &instance->enabled_extensions : NULL, NULL);
1400 }
1401
1402 /* The loader wants us to expose a second GetInstanceProcAddr function
1403 * to work around certain LD_PRELOAD issues seen in apps.
1404 */
1405 PUBLIC
1406 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1407 vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName);
1408
1409 PUBLIC
1410 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
1411 vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
1412 {
1413 return tu_GetInstanceProcAddr(instance, pName);
1414 }
1415
1416 PFN_vkVoidFunction
1417 tu_GetDeviceProcAddr(VkDevice _device, const char *pName)
1418 {
1419 TU_FROM_HANDLE(tu_device, device, _device);
1420
1421 return tu_lookup_entrypoint_checked(pName, device->instance->api_version,
1422 &device->instance->enabled_extensions,
1423 &device->enabled_extensions);
1424 }
1425
1426 static VkResult
1427 tu_alloc_memory(struct tu_device *device,
1428 const VkMemoryAllocateInfo *pAllocateInfo,
1429 const VkAllocationCallbacks *pAllocator,
1430 VkDeviceMemory *pMem)
1431 {
1432 struct tu_device_memory *mem;
1433 VkResult result;
1434
1435 assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
1436
1437 if (pAllocateInfo->allocationSize == 0) {
1438 /* Apparently, this is allowed */
1439 *pMem = VK_NULL_HANDLE;
1440 return VK_SUCCESS;
1441 }
1442
1443 mem = vk_object_alloc(&device->vk, pAllocator, sizeof(*mem),
1444 VK_OBJECT_TYPE_DEVICE_MEMORY);
1445 if (mem == NULL)
1446 return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1447
1448 const VkImportMemoryFdInfoKHR *fd_info =
1449 vk_find_struct_const(pAllocateInfo->pNext, IMPORT_MEMORY_FD_INFO_KHR);
1450 if (fd_info && !fd_info->handleType)
1451 fd_info = NULL;
1452
1453 if (fd_info) {
1454 assert(fd_info->handleType ==
1455 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
1456 fd_info->handleType ==
1457 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
1458
1459 /*
1460 * TODO Importing the same fd twice gives us the same handle without
1461 * reference counting. We need to maintain a per-instance handle-to-bo
1462 * table and add reference count to tu_bo.
1463 */
1464 result = tu_bo_init_dmabuf(device, &mem->bo,
1465 pAllocateInfo->allocationSize, fd_info->fd);
1466 if (result == VK_SUCCESS) {
1467 /* take ownership and close the fd */
1468 close(fd_info->fd);
1469 }
1470 } else {
1471 result =
1472 tu_bo_init_new(device, &mem->bo, pAllocateInfo->allocationSize);
1473 }
1474
1475 if (result != VK_SUCCESS) {
1476 vk_object_free(&device->vk, pAllocator, mem);
1477 return result;
1478 }
1479
1480 mem->size = pAllocateInfo->allocationSize;
1481 mem->type_index = pAllocateInfo->memoryTypeIndex;
1482
1483 mem->map = NULL;
1484 mem->user_ptr = NULL;
1485
1486 *pMem = tu_device_memory_to_handle(mem);
1487
1488 return VK_SUCCESS;
1489 }
1490
1491 VkResult
1492 tu_AllocateMemory(VkDevice _device,
1493 const VkMemoryAllocateInfo *pAllocateInfo,
1494 const VkAllocationCallbacks *pAllocator,
1495 VkDeviceMemory *pMem)
1496 {
1497 TU_FROM_HANDLE(tu_device, device, _device);
1498 return tu_alloc_memory(device, pAllocateInfo, pAllocator, pMem);
1499 }
1500
1501 void
1502 tu_FreeMemory(VkDevice _device,
1503 VkDeviceMemory _mem,
1504 const VkAllocationCallbacks *pAllocator)
1505 {
1506 TU_FROM_HANDLE(tu_device, device, _device);
1507 TU_FROM_HANDLE(tu_device_memory, mem, _mem);
1508
1509 if (mem == NULL)
1510 return;
1511
1512 tu_bo_finish(device, &mem->bo);
1513 vk_object_free(&device->vk, pAllocator, mem);
1514 }
1515
1516 VkResult
1517 tu_MapMemory(VkDevice _device,
1518 VkDeviceMemory _memory,
1519 VkDeviceSize offset,
1520 VkDeviceSize size,
1521 VkMemoryMapFlags flags,
1522 void **ppData)
1523 {
1524 TU_FROM_HANDLE(tu_device, device, _device);
1525 TU_FROM_HANDLE(tu_device_memory, mem, _memory);
1526 VkResult result;
1527
1528 if (mem == NULL) {
1529 *ppData = NULL;
1530 return VK_SUCCESS;
1531 }
1532
1533 if (mem->user_ptr) {
1534 *ppData = mem->user_ptr;
1535 } else if (!mem->map) {
1536 result = tu_bo_map(device, &mem->bo);
1537 if (result != VK_SUCCESS)
1538 return result;
1539 *ppData = mem->map = mem->bo.map;
1540 } else
1541 *ppData = mem->map;
1542
1543 if (*ppData) {
1544 *ppData += offset;
1545 return VK_SUCCESS;
1546 }
1547
1548 return vk_error(device->instance, VK_ERROR_MEMORY_MAP_FAILED);
1549 }
1550
1551 void
1552 tu_UnmapMemory(VkDevice _device, VkDeviceMemory _memory)
1553 {
1554 /* I do not see any unmapping done by the freedreno Gallium driver. */
1555 }
1556
1557 VkResult
1558 tu_FlushMappedMemoryRanges(VkDevice _device,
1559 uint32_t memoryRangeCount,
1560 const VkMappedMemoryRange *pMemoryRanges)
1561 {
1562 return VK_SUCCESS;
1563 }
1564
1565 VkResult
1566 tu_InvalidateMappedMemoryRanges(VkDevice _device,
1567 uint32_t memoryRangeCount,
1568 const VkMappedMemoryRange *pMemoryRanges)
1569 {
1570 return VK_SUCCESS;
1571 }
1572
void
tu_GetBufferMemoryRequirements(VkDevice _device,
                               VkBuffer _buffer,
                               VkMemoryRequirements *pMemoryRequirements)
{
   TU_FROM_HANDLE(tu_buffer, buffer, _buffer);

   pMemoryRequirements->memoryTypeBits = 1;
   pMemoryRequirements->alignment = 64;
   pMemoryRequirements->size =
      align64(buffer->size, pMemoryRequirements->alignment);
}

void
tu_GetBufferMemoryRequirements2(
   VkDevice device,
   const VkBufferMemoryRequirementsInfo2 *pInfo,
   VkMemoryRequirements2 *pMemoryRequirements)
{
   tu_GetBufferMemoryRequirements(device, pInfo->buffer,
                                  &pMemoryRequirements->memoryRequirements);
}

void
tu_GetImageMemoryRequirements(VkDevice _device,
                              VkImage _image,
                              VkMemoryRequirements *pMemoryRequirements)
{
   TU_FROM_HANDLE(tu_image, image, _image);

   pMemoryRequirements->memoryTypeBits = 1;
   pMemoryRequirements->size = image->total_size;
   pMemoryRequirements->alignment = image->layout[0].base_align;
}

void
tu_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   tu_GetImageMemoryRequirements(device, pInfo->image,
                                 &pMemoryRequirements->memoryRequirements);
}

void
tu_GetImageSparseMemoryRequirements(
   VkDevice device,
   VkImage image,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
   tu_stub();
}

void
tu_GetImageSparseMemoryRequirements2(
   VkDevice device,
   const VkImageSparseMemoryRequirementsInfo2 *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   tu_stub();
}

void
tu_GetDeviceMemoryCommitment(VkDevice device,
                             VkDeviceMemory memory,
                             VkDeviceSize *pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
}

VkResult
tu_BindBufferMemory2(VkDevice device,
                     uint32_t bindInfoCount,
                     const VkBindBufferMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      TU_FROM_HANDLE(tu_device_memory, mem, pBindInfos[i].memory);
      TU_FROM_HANDLE(tu_buffer, buffer, pBindInfos[i].buffer);

      if (mem) {
         buffer->bo = &mem->bo;
         buffer->bo_offset = pBindInfos[i].memoryOffset;
      } else {
         buffer->bo = NULL;
      }
   }
   return VK_SUCCESS;
}

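/* The Vulkan 1.0 bind entry points are thin wrappers that forward a
 * single-element array to the 1.1 *2() variants above and below.
 */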
VkResult
tu_BindBufferMemory(VkDevice device,
                    VkBuffer buffer,
                    VkDeviceMemory memory,
                    VkDeviceSize memoryOffset)
{
   const VkBindBufferMemoryInfo info = {
      .sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
      .buffer = buffer,
      .memory = memory,
      .memoryOffset = memoryOffset
   };

   return tu_BindBufferMemory2(device, 1, &info);
}

VkResult
tu_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      TU_FROM_HANDLE(tu_image, image, pBindInfos[i].image);
      TU_FROM_HANDLE(tu_device_memory, mem, pBindInfos[i].memory);

      if (mem) {
         image->bo = &mem->bo;
         image->bo_offset = pBindInfos[i].memoryOffset;
      } else {
         image->bo = NULL;
         image->bo_offset = 0;
      }
   }

   return VK_SUCCESS;
}

VkResult
tu_BindImageMemory(VkDevice device,
                   VkImage image,
                   VkDeviceMemory memory,
                   VkDeviceSize memoryOffset)
{
   const VkBindImageMemoryInfo info = {
      .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
      .image = image,
      .memory = memory,
      .memoryOffset = memoryOffset
   };

   return tu_BindImageMemory2(device, 1, &info);
}

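/* Sparse binding is not implemented (see the
 * tu_GetImageSparseMemoryRequirements*() stubs above); this no-op just
 * reports success.
 */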
VkResult
tu_QueueBindSparse(VkQueue _queue,
                   uint32_t bindInfoCount,
                   const VkBindSparseInfo *pBindInfo,
                   VkFence _fence)
{
   return VK_SUCCESS;
}

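/* Events are backed by a small BO holding a single 64-bit word: 1 means
 * set, 0 means reset. The host reads and writes it through the mapping
 * below; command buffers presumably toggle it with GPU memory writes.
 * A typical cross-device wait, sketched with hypothetical handles:
 *
 *    vkCmdSetEvent(cmd, event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
 *    ... submit cmd ...
 *    while (vkGetEventStatus(device, event) == VK_EVENT_RESET)
 *       ; // spin until the GPU writes the word
 */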
VkResult
tu_CreateEvent(VkDevice _device,
               const VkEventCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *pAllocator,
               VkEvent *pEvent)
{
   TU_FROM_HANDLE(tu_device, device, _device);

   struct tu_event *event =
      vk_object_alloc(&device->vk, pAllocator, sizeof(*event),
                      VK_OBJECT_TYPE_EVENT);
   if (!event)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   VkResult result = tu_bo_init_new(device, &event->bo, 0x1000);
   if (result != VK_SUCCESS)
      goto fail_alloc;

   result = tu_bo_map(device, &event->bo);
   if (result != VK_SUCCESS)
      goto fail_map;

   *pEvent = tu_event_to_handle(event);

   return VK_SUCCESS;

fail_map:
   tu_bo_finish(device, &event->bo);
fail_alloc:
   vk_object_free(&device->vk, pAllocator, event);
   return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
}

void
tu_DestroyEvent(VkDevice _device,
                VkEvent _event,
                const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_event, event, _event);

   if (!event)
      return;

   tu_bo_finish(device, &event->bo);
   vk_object_free(&device->vk, pAllocator, event);
}

VkResult
tu_GetEventStatus(VkDevice _device, VkEvent _event)
{
   TU_FROM_HANDLE(tu_event, event, _event);

   if (*(uint64_t*) event->bo.map == 1)
      return VK_EVENT_SET;
   return VK_EVENT_RESET;
}

VkResult
tu_SetEvent(VkDevice _device, VkEvent _event)
{
   TU_FROM_HANDLE(tu_event, event, _event);
   *(uint64_t*) event->bo.map = 1;

   return VK_SUCCESS;
}

VkResult
tu_ResetEvent(VkDevice _device, VkEvent _event)
{
   TU_FROM_HANDLE(tu_event, event, _event);
   *(uint64_t*) event->bo.map = 0;

   return VK_SUCCESS;
}

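/* VkBuffer carries no storage of its own: creation only records the
 * size/usage/flags, and the backing BO is attached later in
 * tu_BindBufferMemory2().
 */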
VkResult
tu_CreateBuffer(VkDevice _device,
                const VkBufferCreateInfo *pCreateInfo,
                const VkAllocationCallbacks *pAllocator,
                VkBuffer *pBuffer)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_buffer *buffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);

   buffer = vk_object_alloc(&device->vk, pAllocator, sizeof(*buffer),
                            VK_OBJECT_TYPE_BUFFER);
   if (buffer == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   buffer->size = pCreateInfo->size;
   buffer->usage = pCreateInfo->usage;
   buffer->flags = pCreateInfo->flags;

   *pBuffer = tu_buffer_to_handle(buffer);

   return VK_SUCCESS;
}

void
tu_DestroyBuffer(VkDevice _device,
                 VkBuffer _buffer,
                 const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_buffer, buffer, _buffer);

   if (!buffer)
      return;

   vk_object_free(&device->vk, pAllocator, buffer);
}

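/* The framebuffer and its attachment array are allocated as one block
 * (sizeof(*framebuffer) plus attachmentCount trailing tu_attachment_info
 * slots), and the GMEM tiling configuration is precomputed here via
 * tu_framebuffer_tiling_config().
 */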
VkResult
tu_CreateFramebuffer(VkDevice _device,
                     const VkFramebufferCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkFramebuffer *pFramebuffer)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_render_pass, pass, pCreateInfo->renderPass);
   struct tu_framebuffer *framebuffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

   size_t size = sizeof(*framebuffer) + sizeof(struct tu_attachment_info) *
                 pCreateInfo->attachmentCount;
   framebuffer = vk_object_alloc(&device->vk, pAllocator, size,
                                 VK_OBJECT_TYPE_FRAMEBUFFER);
   if (framebuffer == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->attachment_count = pCreateInfo->attachmentCount;
   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;
   for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
      VkImageView _iview = pCreateInfo->pAttachments[i];
      struct tu_image_view *iview = tu_image_view_from_handle(_iview);
      framebuffer->attachments[i].attachment = iview;
   }

   tu_framebuffer_tiling_config(framebuffer, device, pass);

   *pFramebuffer = tu_framebuffer_to_handle(framebuffer);
   return VK_SUCCESS;
}

void
tu_DestroyFramebuffer(VkDevice _device,
                      VkFramebuffer _fb,
                      const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_framebuffer, fb, _fb);

   if (!fb)
      return;

   vk_object_free(&device->vk, pAllocator, fb);
}

static void
tu_init_sampler(struct tu_device *device,
                struct tu_sampler *sampler,
                const VkSamplerCreateInfo *pCreateInfo)
{
   const struct VkSamplerReductionModeCreateInfo *reduction =
      vk_find_struct_const(pCreateInfo->pNext, SAMPLER_REDUCTION_MODE_CREATE_INFO);
   const struct VkSamplerYcbcrConversionInfo *ycbcr_conversion =
      vk_find_struct_const(pCreateInfo->pNext, SAMPLER_YCBCR_CONVERSION_INFO);
   const VkSamplerCustomBorderColorCreateInfoEXT *custom_border_color =
      vk_find_struct_const(pCreateInfo->pNext, SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
   /* For non-custom border colors, the VK enum is translated directly to an
    * offset in the border color buffer. Custom border colors are located
    * immediately after the builtin colors, so an offset of
    * TU_BORDER_COLOR_BUILTIN is added.
    */
   uint32_t border_color = (unsigned) pCreateInfo->borderColor;
   if (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT ||
       pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT) {
      mtx_lock(&device->mutex);
      border_color = BITSET_FFS(device->custom_border_color);
      BITSET_CLEAR(device->custom_border_color, border_color);
      mtx_unlock(&device->mutex);
      tu6_pack_border_color(device->global_bo.map + gb_offset(bcolor[border_color]),
                            &custom_border_color->customBorderColor,
                            pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT);
      border_color += TU_BORDER_COLOR_BUILTIN;
   }

   unsigned aniso = pCreateInfo->anisotropyEnable ?
      util_last_bit(MIN2((uint32_t)pCreateInfo->maxAnisotropy >> 1, 8)) : 0;
   bool miplinear = (pCreateInfo->mipmapMode == VK_SAMPLER_MIPMAP_MODE_LINEAR);
   float min_lod = CLAMP(pCreateInfo->minLod, 0.0f, 4095.0f / 256.0f);
   float max_lod = CLAMP(pCreateInfo->maxLod, 0.0f, 4095.0f / 256.0f);

   sampler->descriptor[0] =
      COND(miplinear, A6XX_TEX_SAMP_0_MIPFILTER_LINEAR_NEAR) |
      A6XX_TEX_SAMP_0_XY_MAG(tu6_tex_filter(pCreateInfo->magFilter, aniso)) |
      A6XX_TEX_SAMP_0_XY_MIN(tu6_tex_filter(pCreateInfo->minFilter, aniso)) |
      A6XX_TEX_SAMP_0_ANISO(aniso) |
      A6XX_TEX_SAMP_0_WRAP_S(tu6_tex_wrap(pCreateInfo->addressModeU)) |
      A6XX_TEX_SAMP_0_WRAP_T(tu6_tex_wrap(pCreateInfo->addressModeV)) |
      A6XX_TEX_SAMP_0_WRAP_R(tu6_tex_wrap(pCreateInfo->addressModeW)) |
      A6XX_TEX_SAMP_0_LOD_BIAS(pCreateInfo->mipLodBias);
   sampler->descriptor[1] =
      /* COND(!cso->seamless_cube_map, A6XX_TEX_SAMP_1_CUBEMAPSEAMLESSFILTOFF) | */
      COND(pCreateInfo->unnormalizedCoordinates, A6XX_TEX_SAMP_1_UNNORM_COORDS) |
      A6XX_TEX_SAMP_1_MIN_LOD(min_lod) |
      A6XX_TEX_SAMP_1_MAX_LOD(max_lod) |
      COND(pCreateInfo->compareEnable,
           A6XX_TEX_SAMP_1_COMPARE_FUNC(tu6_compare_func(pCreateInfo->compareOp)));
   sampler->descriptor[2] = A6XX_TEX_SAMP_2_BCOLOR(border_color);
   sampler->descriptor[3] = 0;

   if (reduction) {
      sampler->descriptor[2] |= A6XX_TEX_SAMP_2_REDUCTION_MODE(
         tu6_reduction_mode(reduction->reductionMode));
   }

   sampler->ycbcr_sampler = ycbcr_conversion ?
      tu_sampler_ycbcr_conversion_from_handle(ycbcr_conversion->conversion) : NULL;

   if (sampler->ycbcr_sampler &&
       sampler->ycbcr_sampler->chroma_filter == VK_FILTER_LINEAR) {
      sampler->descriptor[2] |= A6XX_TEX_SAMP_2_CHROMA_LINEAR;
   }

   /* TODO:
    * A6XX_TEX_SAMP_1_MIPFILTER_LINEAR_FAR disables mipmapping, but vk has no NONE mipfilter?
    */
}

VkResult
tu_CreateSampler(VkDevice _device,
                 const VkSamplerCreateInfo *pCreateInfo,
                 const VkAllocationCallbacks *pAllocator,
                 VkSampler *pSampler)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_sampler *sampler;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = vk_object_alloc(&device->vk, pAllocator, sizeof(*sampler),
                             VK_OBJECT_TYPE_SAMPLER);
   if (!sampler)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   tu_init_sampler(device, sampler, pCreateInfo);
   *pSampler = tu_sampler_to_handle(sampler);

   return VK_SUCCESS;
}

void
tu_DestroySampler(VkDevice _device,
                  VkSampler _sampler,
                  const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_sampler, sampler, _sampler);
   uint32_t border_color;

   if (!sampler)
      return;

   border_color = (sampler->descriptor[2] & A6XX_TEX_SAMP_2_BCOLOR__MASK) >> A6XX_TEX_SAMP_2_BCOLOR__SHIFT;
   if (border_color >= TU_BORDER_COLOR_BUILTIN) {
      border_color -= TU_BORDER_COLOR_BUILTIN;
      /* If the sampler had a custom border color, return its slot to the
       * pool. TODO: do this without taking the device mutex.
       */
      mtx_lock(&device->mutex);
      assert(!BITSET_TEST(device->custom_border_color, border_color));
      BITSET_SET(device->custom_border_color, border_color);
      mtx_unlock(&device->mutex);
   }

   vk_object_free(&device->vk, pAllocator, sampler);
}

/* vk_icd.h does not declare this function, so we declare it here to
 * suppress Wmissing-prototypes.
 */
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion);

PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
{
   /* For the full details on loader interface versioning, see
    * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
    * What follows is a condensed summary, to help you navigate the large and
    * confusing official doc.
    *
    *    - Loader interface v0 is incompatible with later versions. We don't
    *      support it.
    *
    *    - In loader interface v1:
    *        - The first ICD entrypoint called by the loader is
    *          vk_icdGetInstanceProcAddr(). The ICD must statically expose this
    *          entrypoint.
    *        - The ICD must statically expose no other Vulkan symbol unless it
    *          is linked with -Bsymbolic.
    *        - Each dispatchable Vulkan handle created by the ICD must be
    *          a pointer to a struct whose first member is VK_LOADER_DATA. The
    *          ICD must initialize VK_LOADER_DATA.loadMagic to
    *          ICD_LOADER_MAGIC.
    *        - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
    *          vkDestroySurfaceKHR(). The ICD must be capable of working with
    *          such loader-managed surfaces.
    *
    *    - Loader interface v2 differs from v1 in:
    *        - The first ICD entrypoint called by the loader is
    *          vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
    *          statically expose this entrypoint.
    *
    *    - Loader interface v3 differs from v2 in:
    *        - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
    *          vkDestroySurfaceKHR(), and other API which uses VkSurfaceKHR,
    *          because the loader no longer does so.
    */
   *pSupportedVersion = MIN2(*pSupportedVersion, 3u);
   return VK_SUCCESS;
}

VkResult
tu_GetMemoryFdKHR(VkDevice _device,
                  const VkMemoryGetFdInfoKHR *pGetFdInfo,
                  int *pFd)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_device_memory, memory, pGetFdInfo->memory);

   assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);

   /* At the moment, we support only the below handle types. */
   assert(pGetFdInfo->handleType ==
             VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
          pGetFdInfo->handleType ==
             VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);

   int prime_fd = tu_bo_export_dmabuf(device, &memory->bo);
   if (prime_fd < 0)
      return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *pFd = prime_fd;
   return VK_SUCCESS;
}

VkResult
tu_GetMemoryFdPropertiesKHR(VkDevice _device,
                            VkExternalMemoryHandleTypeFlagBits handleType,
                            int fd,
                            VkMemoryFdPropertiesKHR *pMemoryFdProperties)
{
   assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
   pMemoryFdProperties->memoryTypeBits = 1;
   return VK_SUCCESS;
}

VkResult
tu_ImportFenceFdKHR(VkDevice _device,
                    const VkImportFenceFdInfoKHR *pImportFenceFdInfo)
{
   tu_stub();

   return VK_SUCCESS;
}

VkResult
tu_GetFenceFdKHR(VkDevice _device,
                 const VkFenceGetFdInfoKHR *pGetFdInfo,
                 int *pFd)
{
   tu_stub();

   return VK_SUCCESS;
}

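/* Everything is zeroed here: no external fence handle types are
 * advertised yet, consistent with the tu_ImportFenceFdKHR()/
 * tu_GetFenceFdKHR() stubs above.
 */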
void
tu_GetPhysicalDeviceExternalFenceProperties(
   VkPhysicalDevice physicalDevice,
   const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
   VkExternalFenceProperties *pExternalFenceProperties)
{
   pExternalFenceProperties->exportFromImportedHandleTypes = 0;
   pExternalFenceProperties->compatibleHandleTypes = 0;
   pExternalFenceProperties->externalFenceFeatures = 0;
}

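/* The VK_EXT_debug_report entry points simply forward to the shared
 * vk_debug_report machinery hanging off the instance.
 */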
VkResult
tu_CreateDebugReportCallbackEXT(
   VkInstance _instance,
   const VkDebugReportCallbackCreateInfoEXT *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDebugReportCallbackEXT *pCallback)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);
   return vk_create_debug_report_callback(&instance->debug_report_callbacks,
                                          pCreateInfo, pAllocator,
                                          &instance->alloc, pCallback);
}

void
tu_DestroyDebugReportCallbackEXT(VkInstance _instance,
                                 VkDebugReportCallbackEXT _callback,
                                 const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);
   vk_destroy_debug_report_callback(&instance->debug_report_callbacks,
                                    _callback, pAllocator, &instance->alloc);
}

void
tu_DebugReportMessageEXT(VkInstance _instance,
                         VkDebugReportFlagsEXT flags,
                         VkDebugReportObjectTypeEXT objectType,
                         uint64_t object,
                         size_t location,
                         int32_t messageCode,
                         const char *pLayerPrefix,
                         const char *pMessage)
{
   TU_FROM_HANDLE(tu_instance, instance, _instance);
   vk_debug_report(&instance->debug_report_callbacks, flags, objectType,
                   object, location, messageCode, pLayerPrefix, pMessage);
}

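/* There is only one physical device, so the only valid "peer" is the
 * device itself (hence the assert below) and we can report the full
 * peer memory feature set.
 */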
void
tu_GetDeviceGroupPeerMemoryFeatures(
   VkDevice device,
   uint32_t heapIndex,
   uint32_t localDeviceIndex,
   uint32_t remoteDeviceIndex,
   VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
{
   assert(localDeviceIndex == remoteDeviceIndex);

   *pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
                          VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
}

void tu_GetPhysicalDeviceMultisamplePropertiesEXT(
   VkPhysicalDevice physicalDevice,
   VkSampleCountFlagBits samples,
   VkMultisamplePropertiesEXT* pMultisampleProperties)
{
   TU_FROM_HANDLE(tu_physical_device, pdevice, physicalDevice);

   if (samples <= VK_SAMPLE_COUNT_4_BIT && pdevice->supported_extensions.EXT_sample_locations)
      pMultisampleProperties->maxSampleLocationGridSize = (VkExtent2D){ 1, 1 };
   else
      pMultisampleProperties->maxSampleLocationGridSize = (VkExtent2D){ 0, 0 };
}

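/* VK_EXT_private_data is implemented entirely by the common runtime: the
 * four entry points below just delegate to the vk_private_data_slot_* and
 * vk_object_base_*_private_data helpers.
 */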
VkResult
tu_CreatePrivateDataSlotEXT(VkDevice _device,
                            const VkPrivateDataSlotCreateInfoEXT* pCreateInfo,
                            const VkAllocationCallbacks* pAllocator,
                            VkPrivateDataSlotEXT* pPrivateDataSlot)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   return vk_private_data_slot_create(&device->vk,
                                      pCreateInfo,
                                      pAllocator,
                                      pPrivateDataSlot);
}

void
tu_DestroyPrivateDataSlotEXT(VkDevice _device,
                             VkPrivateDataSlotEXT privateDataSlot,
                             const VkAllocationCallbacks* pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   vk_private_data_slot_destroy(&device->vk, privateDataSlot, pAllocator);
}

VkResult
tu_SetPrivateDataEXT(VkDevice _device,
                     VkObjectType objectType,
                     uint64_t objectHandle,
                     VkPrivateDataSlotEXT privateDataSlot,
                     uint64_t data)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   return vk_object_base_set_private_data(&device->vk,
                                          objectType,
                                          objectHandle,
                                          privateDataSlot,
                                          data);
}

void
tu_GetPrivateDataEXT(VkDevice _device,
                     VkObjectType objectType,
                     uint64_t objectHandle,
                     VkPrivateDataSlotEXT privateDataSlot,
                     uint64_t* pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   vk_object_base_get_private_data(&device->vk,
                                   objectType,
                                   objectHandle,
                                   privateDataSlot,
                                   pData);
}