freedreno/ir3: track # of driver params
[mesa.git] / src / freedreno / vulkan / tu_android.c
1 /*
2 * Copyright © 2017, Google Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "tu_private.h"
25
26 #include <hardware/gralloc.h>
27 #include <hardware/hardware.h>
28 #include <hardware/hwvulkan.h>
29 #include <libsync.h>
30
31 #include <vulkan/vk_android_native_buffer.h>
32 #include <vulkan/vk_icd.h>
33
34 static int
35 tu_hal_open(const struct hw_module_t *mod,
36 const char *id,
37 struct hw_device_t **dev);
38 static int
39 tu_hal_close(struct hw_device_t *dev);
40
/* Compile-time sanity check container (never called; UNUSED silences the
 * dead-function warning).  The Android hwvulkan loader and the Vulkan ICD
 * loader must agree on the dispatch-magic value embedded in dispatchable
 * handles, otherwise the loader would reject our objects. */
static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}
46
47 PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
48 .common =
49 {
50 .tag = HARDWARE_MODULE_TAG,
51 .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
52 .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
53 .id = HWVULKAN_HARDWARE_MODULE_ID,
54 .name = "AMD Vulkan HAL",
55 .author = "Google",
56 .methods =
57 &(hw_module_methods_t){
58 .open = tu_hal_open,
59 },
60 },
61 };
62
/* Clear the bits of test_mask from *inout_mask; report whether any of
 * those bits were actually set (i.e. whether the mask changed). */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   const uint32_t hit = *inout_mask & test_mask;
   *inout_mask ^= hit;   /* drop exactly the bits that were present */
   return hit != 0;
}
71
72 static int
73 tu_hal_open(const struct hw_module_t *mod,
74 const char *id,
75 struct hw_device_t **dev)
76 {
77 assert(mod == &HAL_MODULE_INFO_SYM.common);
78 assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);
79
80 hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
81 if (!hal_dev)
82 return -1;
83
84 *hal_dev = (hwvulkan_device_t){
85 .common =
86 {
87 .tag = HARDWARE_DEVICE_TAG,
88 .version = HWVULKAN_DEVICE_API_VERSION_0_1,
89 .module = &HAL_MODULE_INFO_SYM.common,
90 .close = tu_hal_close,
91 },
92 .EnumerateInstanceExtensionProperties =
93 tu_EnumerateInstanceExtensionProperties,
94 .CreateInstance = tu_CreateInstance,
95 .GetInstanceProcAddr = tu_GetInstanceProcAddr,
96 };
97
98 *dev = &hal_dev->common;
99 return 0;
100 }
101
/* hw_device_t::close callback.  Deliberately a failing stub that does not
 * free the device: per hwvulkan.h the Android loader never invokes
 * close() on a hwvulkan device. */
static int
tu_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}
108
109 VkResult
110 tu_image_from_gralloc(VkDevice device_h,
111 const VkImageCreateInfo *base_info,
112 const VkNativeBufferANDROID *gralloc_info,
113 const VkAllocationCallbacks *alloc,
114 VkImage *out_image_h)
115
116 {
117 TU_FROM_HANDLE(tu_device, device, device_h);
118 VkImage image_h = VK_NULL_HANDLE;
119 struct tu_image *image = NULL;
120 struct tu_bo *bo = NULL;
121 VkResult result;
122
123 result = tu_image_create(
124 device_h,
125 &(struct tu_image_create_info) {
126 .vk_info = base_info, .scanout = true, .no_metadata_planes = true },
127 alloc, &image_h);
128
129 if (result != VK_SUCCESS)
130 return result;
131
132 if (gralloc_info->handle->numFds != 1) {
133 return vk_errorf(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE,
134 "VkNativeBufferANDROID::handle::numFds is %d, "
135 "expected 1",
136 gralloc_info->handle->numFds);
137 }
138
139 /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
140 * must exceed that of the gralloc handle, and we do not own the gralloc
141 * handle.
142 */
143 int dma_buf = gralloc_info->handle->data[0];
144
145 image = tu_image_from_handle(image_h);
146
147 VkDeviceMemory memory_h;
148
149 const VkMemoryDedicatedAllocateInfo ded_alloc = {
150 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
151 .pNext = NULL,
152 .buffer = VK_NULL_HANDLE,
153 .image = image_h
154 };
155
156 const VkImportMemoryFdInfo import_info = {
157 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO,
158 .pNext = &ded_alloc,
159 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
160 .fd = dup(dma_buf),
161 };
162 /* Find the first VRAM memory type, or GART for PRIME images. */
163 int memory_type_index = -1;
164 for (int i = 0;
165 i < device->physical_device->memory_properties.memoryTypeCount; ++i) {
166 bool is_local =
167 !!(device->physical_device->memory_properties.memoryTypes[i]
168 .propertyFlags &
169 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
170 if (is_local) {
171 memory_type_index = i;
172 break;
173 }
174 }
175
176 /* fallback */
177 if (memory_type_index == -1)
178 memory_type_index = 0;
179
180 result =
181 tu_AllocateMemory(device_h,
182 &(VkMemoryAllocateInfo) {
183 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
184 .pNext = &import_info,
185 .allocationSize = image->size,
186 .memoryTypeIndex = memory_type_index,
187 },
188 alloc, &memory_h);
189 if (result != VK_SUCCESS)
190 goto fail_create_image;
191
192 tu_BindImageMemory(device_h, image_h, memory_h, 0);
193
194 image->owned_memory = memory_h;
195 /* Don't clobber the out-parameter until success is certain. */
196 *out_image_h = image_h;
197
198 return VK_SUCCESS;
199
200 fail_create_image:
201 fail_size:
202 tu_DestroyImage(device_h, image_h, alloc);
203
204 return result;
205 }
206
207 VkResult
208 tu_GetSwapchainGrallocUsageANDROID(VkDevice device_h,
209 VkFormat format,
210 VkImageUsageFlags imageUsage,
211 int *grallocUsage)
212 {
213 TU_FROM_HANDLE(tu_device, device, device_h);
214 struct tu_physical_device *phys_dev = device->physical_device;
215 VkPhysicalDevice phys_dev_h = tu_physical_device_to_handle(phys_dev);
216 VkResult result;
217
218 *grallocUsage = 0;
219
220 /* WARNING: Android Nougat's libvulkan.so hardcodes the VkImageUsageFlags
221 * returned to applications via
222 * VkSurfaceCapabilitiesKHR::supportedUsageFlags.
223 * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
224 *
225 * TODO(jessehall): I think these are right, but haven't thought hard
226 * about it. Do we need to query the driver for support of any of
227 * these?
228 *
229 * Any disagreement between this function and the hardcoded
230 * VkSurfaceCapabilitiesKHR:supportedUsageFlags causes tests
231 * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
232 */
233
234 const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
235 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
236 .format = format,
237 .type = VK_IMAGE_TYPE_2D,
238 .tiling = VK_IMAGE_TILING_OPTIMAL,
239 .usage = imageUsage,
240 };
241
242 VkImageFormatProperties2 image_format_props = {
243 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
244 };
245
246 /* Check that requested format and usage are supported. */
247 result = tu_GetPhysicalDeviceImageFormatProperties2(
248 phys_dev_h, &image_format_info, &image_format_props);
249 if (result != VK_SUCCESS) {
250 return vk_errorf(device->instance, result,
251 "tu_GetPhysicalDeviceImageFormatProperties2 failed "
252 "inside %s",
253 __func__);
254 }
255
256 if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
257 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
258 *grallocUsage |= GRALLOC_USAGE_HW_RENDER;
259
260 if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
261 VK_IMAGE_USAGE_SAMPLED_BIT |
262 VK_IMAGE_USAGE_STORAGE_BIT |
263 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
264 *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;
265
266 /* All VkImageUsageFlags not explicitly checked here are unsupported for
267 * gralloc swapchains.
268 */
269 if (imageUsage != 0) {
270 return vk_errorf(device->instance, VK_ERROR_FORMAT_NOT_SUPPORTED,
271 "unsupported VkImageUsageFlags(0x%x) for gralloc "
272 "swapchain",
273 imageUsage);
274 }
275
276 /*
277 * FINISHME: Advertise all display-supported formats. Mostly
278 * DRM_FORMAT_ARGB2101010 and DRM_FORMAT_ABGR2101010, but need to check
279 * what we need for 30-bit colors.
280 */
281 if (format == VK_FORMAT_B8G8R8A8_UNORM ||
282 format == VK_FORMAT_B5G6R5_UNORM_PACK16) {
283 *grallocUsage |= GRALLOC_USAGE_HW_FB | GRALLOC_USAGE_HW_COMPOSER |
284 GRALLOC_USAGE_EXTERNAL_DISP;
285 }
286
287 if (*grallocUsage == 0)
288 return VK_ERROR_FORMAT_NOT_SUPPORTED;
289
290 return VK_SUCCESS;
291 }
292
293 VkResult
294 tu_AcquireImageANDROID(VkDevice device,
295 VkImage image_h,
296 int nativeFenceFd,
297 VkSemaphore semaphore,
298 VkFence fence)
299 {
300 VkResult semaphore_result = VK_SUCCESS, fence_result = VK_SUCCESS;
301
302 if (semaphore != VK_NULL_HANDLE) {
303 int semaphore_fd =
304 nativeFenceFd >= 0 ? dup(nativeFenceFd) : nativeFenceFd;
305 semaphore_result = tu_ImportSemaphoreFdKHR(
306 device, &(VkImportSemaphoreFdInfoKHR) {
307 .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
308 .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
309 .fd = semaphore_fd,
310 .semaphore = semaphore,
311 });
312 }
313
314 if (fence != VK_NULL_HANDLE) {
315 int fence_fd = nativeFenceFd >= 0 ? dup(nativeFenceFd) : nativeFenceFd;
316 fence_result = tu_ImportFenceFdKHR(
317 device, &(VkImportFenceFdInfoKHR) {
318 .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
319 .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
320 .fd = fence_fd,
321 .fence = fence,
322 });
323 }
324
325 close(nativeFenceFd);
326
327 if (semaphore_result != VK_SUCCESS)
328 return semaphore_result;
329 return fence_result;
330 }
331
332 VkResult
333 tu_QueueSignalReleaseImageANDROID(VkQueue _queue,
334 uint32_t waitSemaphoreCount,
335 const VkSemaphore *pWaitSemaphores,
336 VkImage image,
337 int *pNativeFenceFd)
338 {
339 TU_FROM_HANDLE(tu_queue, queue, _queue);
340 VkResult result = VK_SUCCESS;
341
342 if (waitSemaphoreCount == 0) {
343 if (pNativeFenceFd)
344 *pNativeFenceFd = -1;
345 return VK_SUCCESS;
346 }
347
348 int fd = -1;
349
350 for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
351 int tmp_fd;
352 result = tu_GetSemaphoreFdKHR(
353 tu_device_to_handle(queue->device),
354 &(VkSemaphoreGetFdInfoKHR) {
355 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
356 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
357 .semaphore = pWaitSemaphores[i],
358 },
359 &tmp_fd);
360 if (result != VK_SUCCESS) {
361 if (fd >= 0)
362 close(fd);
363 return result;
364 }
365
366 if (fd < 0)
367 fd = tmp_fd;
368 else if (tmp_fd >= 0) {
369 sync_accumulate("tu", &fd, tmp_fd);
370 close(tmp_fd);
371 }
372 }
373
374 if (pNativeFenceFd) {
375 *pNativeFenceFd = fd;
376 } else if (fd >= 0) {
377 close(fd);
378 /* We still need to do the exports, to reset the semaphores, but
379 * otherwise we don't wait on them. */
380 }
381 return VK_SUCCESS;
382 }