/* turnip: implement UBWC — src/freedreno/vulkan/tu_android.c (mesa.git) */
1 /*
2 * Copyright © 2017, Google Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 */
23
24 #include "tu_private.h"
25
26 #include <hardware/gralloc.h>
27 #include <hardware/hardware.h>
28 #include <hardware/hwvulkan.h>
29 #include <libsync.h>
30
31 #include <vulkan/vk_android_native_buffer.h>
32 #include <vulkan/vk_icd.h>
33
34 #include "drm-uapi/drm_fourcc.h"
35
/* Forward declarations for the Android hwvulkan HAL entry points defined
 * below; HAL_MODULE_INFO_SYM references tu_hal_open before its definition.
 */
static int
tu_hal_open(const struct hw_module_t *mod,
            const char *id,
            struct hw_device_t **dev);
static int
tu_hal_close(struct hw_device_t *dev);
42
/* Compile-time check: the Android hwvulkan loader and the Khronos ICD
 * loader overlay the same dispatch-magic field in dispatchable objects, so
 * the two magic values must agree for either loader to work.
 */
static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}
48
49 PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
50 .common =
51 {
52 .tag = HARDWARE_MODULE_TAG,
53 .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
54 .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
55 .id = HWVULKAN_HARDWARE_MODULE_ID,
56 .name = "AMD Vulkan HAL",
57 .author = "Google",
58 .methods =
59 &(hw_module_methods_t){
60 .open = tu_hal_open,
61 },
62 },
63 };
64
/* Clear test_mask's bits from *inout_mask; report whether any of those bits
 * had been set.
 */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   const uint32_t cleared = *inout_mask & ~test_mask;
   const bool had_bits = cleared != *inout_mask;

   *inout_mask = cleared;
   return had_bits;
}
73
74 static int
75 tu_hal_open(const struct hw_module_t *mod,
76 const char *id,
77 struct hw_device_t **dev)
78 {
79 assert(mod == &HAL_MODULE_INFO_SYM.common);
80 assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);
81
82 hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
83 if (!hal_dev)
84 return -1;
85
86 *hal_dev = (hwvulkan_device_t){
87 .common =
88 {
89 .tag = HARDWARE_DEVICE_TAG,
90 .version = HWVULKAN_DEVICE_API_VERSION_0_1,
91 .module = &HAL_MODULE_INFO_SYM.common,
92 .close = tu_hal_close,
93 },
94 .EnumerateInstanceExtensionProperties =
95 tu_EnumerateInstanceExtensionProperties,
96 .CreateInstance = tu_CreateInstance,
97 .GetInstanceProcAddr = tu_GetInstanceProcAddr,
98 };
99
100 *dev = &hal_dev->common;
101 return 0;
102 }
103
/* hwvulkan HAL close(): per hwvulkan.h the loader never invokes
 * hw_device_t::close(), so always report failure.
 */
static int
tu_hal_close(struct hw_device_t *dev)
{
   (void) dev;
   return -1;
}
110
111 VkResult
112 tu_image_from_gralloc(VkDevice device_h,
113 const VkImageCreateInfo *base_info,
114 const VkNativeBufferANDROID *gralloc_info,
115 const VkAllocationCallbacks *alloc,
116 VkImage *out_image_h)
117
118 {
119 TU_FROM_HANDLE(tu_device, device, device_h);
120 VkImage image_h = VK_NULL_HANDLE;
121 struct tu_image *image = NULL;
122 struct tu_bo *bo = NULL;
123 VkResult result;
124
125 result = tu_image_create(device_h, base_info, alloc, &image_h,
126 DRM_FORMAT_MOD_LINEAR);
127 if (result != VK_SUCCESS)
128 return result;
129
130 if (gralloc_info->handle->numFds != 1) {
131 return vk_errorf(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE,
132 "VkNativeBufferANDROID::handle::numFds is %d, "
133 "expected 1",
134 gralloc_info->handle->numFds);
135 }
136
137 /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
138 * must exceed that of the gralloc handle, and we do not own the gralloc
139 * handle.
140 */
141 int dma_buf = gralloc_info->handle->data[0];
142
143 image = tu_image_from_handle(image_h);
144
145 VkDeviceMemory memory_h;
146
147 const VkMemoryDedicatedAllocateInfo ded_alloc = {
148 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
149 .pNext = NULL,
150 .buffer = VK_NULL_HANDLE,
151 .image = image_h
152 };
153
154 const VkImportMemoryFdInfo import_info = {
155 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO,
156 .pNext = &ded_alloc,
157 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
158 .fd = dup(dma_buf),
159 };
160 /* Find the first VRAM memory type, or GART for PRIME images. */
161 int memory_type_index = -1;
162 for (int i = 0;
163 i < device->physical_device->memory_properties.memoryTypeCount; ++i) {
164 bool is_local =
165 !!(device->physical_device->memory_properties.memoryTypes[i]
166 .propertyFlags &
167 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
168 if (is_local) {
169 memory_type_index = i;
170 break;
171 }
172 }
173
174 /* fallback */
175 if (memory_type_index == -1)
176 memory_type_index = 0;
177
178 result =
179 tu_AllocateMemory(device_h,
180 &(VkMemoryAllocateInfo) {
181 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
182 .pNext = &import_info,
183 .allocationSize = image->size,
184 .memoryTypeIndex = memory_type_index,
185 },
186 alloc, &memory_h);
187 if (result != VK_SUCCESS)
188 goto fail_create_image;
189
190 tu_BindImageMemory(device_h, image_h, memory_h, 0);
191
192 image->owned_memory = memory_h;
193 /* Don't clobber the out-parameter until success is certain. */
194 *out_image_h = image_h;
195
196 return VK_SUCCESS;
197
198 fail_create_image:
199 fail_size:
200 tu_DestroyImage(device_h, image_h, alloc);
201
202 return result;
203 }
204
205 VkResult
206 tu_GetSwapchainGrallocUsageANDROID(VkDevice device_h,
207 VkFormat format,
208 VkImageUsageFlags imageUsage,
209 int *grallocUsage)
210 {
211 TU_FROM_HANDLE(tu_device, device, device_h);
212 struct tu_physical_device *phys_dev = device->physical_device;
213 VkPhysicalDevice phys_dev_h = tu_physical_device_to_handle(phys_dev);
214 VkResult result;
215
216 *grallocUsage = 0;
217
218 /* WARNING: Android Nougat's libvulkan.so hardcodes the VkImageUsageFlags
219 * returned to applications via
220 * VkSurfaceCapabilitiesKHR::supportedUsageFlags.
221 * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
222 *
223 * TODO(jessehall): I think these are right, but haven't thought hard
224 * about it. Do we need to query the driver for support of any of
225 * these?
226 *
227 * Any disagreement between this function and the hardcoded
228 * VkSurfaceCapabilitiesKHR:supportedUsageFlags causes tests
229 * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
230 */
231
232 const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
233 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
234 .format = format,
235 .type = VK_IMAGE_TYPE_2D,
236 .tiling = VK_IMAGE_TILING_OPTIMAL,
237 .usage = imageUsage,
238 };
239
240 VkImageFormatProperties2 image_format_props = {
241 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
242 };
243
244 /* Check that requested format and usage are supported. */
245 result = tu_GetPhysicalDeviceImageFormatProperties2(
246 phys_dev_h, &image_format_info, &image_format_props);
247 if (result != VK_SUCCESS) {
248 return vk_errorf(device->instance, result,
249 "tu_GetPhysicalDeviceImageFormatProperties2 failed "
250 "inside %s",
251 __func__);
252 }
253
254 if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
255 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
256 *grallocUsage |= GRALLOC_USAGE_HW_RENDER;
257
258 if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
259 VK_IMAGE_USAGE_SAMPLED_BIT |
260 VK_IMAGE_USAGE_STORAGE_BIT |
261 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
262 *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;
263
264 /* All VkImageUsageFlags not explicitly checked here are unsupported for
265 * gralloc swapchains.
266 */
267 if (imageUsage != 0) {
268 return vk_errorf(device->instance, VK_ERROR_FORMAT_NOT_SUPPORTED,
269 "unsupported VkImageUsageFlags(0x%x) for gralloc "
270 "swapchain",
271 imageUsage);
272 }
273
274 /*
275 * FINISHME: Advertise all display-supported formats. Mostly
276 * DRM_FORMAT_ARGB2101010 and DRM_FORMAT_ABGR2101010, but need to check
277 * what we need for 30-bit colors.
278 */
279 if (format == VK_FORMAT_B8G8R8A8_UNORM ||
280 format == VK_FORMAT_B5G6R5_UNORM_PACK16) {
281 *grallocUsage |= GRALLOC_USAGE_HW_FB | GRALLOC_USAGE_HW_COMPOSER |
282 GRALLOC_USAGE_EXTERNAL_DISP;
283 }
284
285 if (*grallocUsage == 0)
286 return VK_ERROR_FORMAT_NOT_SUPPORTED;
287
288 return VK_SUCCESS;
289 }
290
291 VkResult
292 tu_AcquireImageANDROID(VkDevice device,
293 VkImage image_h,
294 int nativeFenceFd,
295 VkSemaphore semaphore,
296 VkFence fence)
297 {
298 VkResult semaphore_result = VK_SUCCESS, fence_result = VK_SUCCESS;
299
300 if (semaphore != VK_NULL_HANDLE) {
301 int semaphore_fd =
302 nativeFenceFd >= 0 ? dup(nativeFenceFd) : nativeFenceFd;
303 semaphore_result = tu_ImportSemaphoreFdKHR(
304 device, &(VkImportSemaphoreFdInfoKHR) {
305 .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
306 .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
307 .fd = semaphore_fd,
308 .semaphore = semaphore,
309 });
310 }
311
312 if (fence != VK_NULL_HANDLE) {
313 int fence_fd = nativeFenceFd >= 0 ? dup(nativeFenceFd) : nativeFenceFd;
314 fence_result = tu_ImportFenceFdKHR(
315 device, &(VkImportFenceFdInfoKHR) {
316 .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
317 .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
318 .fd = fence_fd,
319 .fence = fence,
320 });
321 }
322
323 close(nativeFenceFd);
324
325 if (semaphore_result != VK_SUCCESS)
326 return semaphore_result;
327 return fence_result;
328 }
329
330 VkResult
331 tu_QueueSignalReleaseImageANDROID(VkQueue _queue,
332 uint32_t waitSemaphoreCount,
333 const VkSemaphore *pWaitSemaphores,
334 VkImage image,
335 int *pNativeFenceFd)
336 {
337 TU_FROM_HANDLE(tu_queue, queue, _queue);
338 VkResult result = VK_SUCCESS;
339
340 if (waitSemaphoreCount == 0) {
341 if (pNativeFenceFd)
342 *pNativeFenceFd = -1;
343 return VK_SUCCESS;
344 }
345
346 int fd = -1;
347
348 for (uint32_t i = 0; i < waitSemaphoreCount; ++i) {
349 int tmp_fd;
350 result = tu_GetSemaphoreFdKHR(
351 tu_device_to_handle(queue->device),
352 &(VkSemaphoreGetFdInfoKHR) {
353 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
354 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
355 .semaphore = pWaitSemaphores[i],
356 },
357 &tmp_fd);
358 if (result != VK_SUCCESS) {
359 if (fd >= 0)
360 close(fd);
361 return result;
362 }
363
364 if (fd < 0)
365 fd = tmp_fd;
366 else if (tmp_fd >= 0) {
367 sync_accumulate("tu", &fd, tmp_fd);
368 close(tmp_fd);
369 }
370 }
371
372 if (pNativeFenceFd) {
373 *pNativeFenceFd = fd;
374 } else if (fd >= 0) {
375 close(fd);
376 /* We still need to do the exports, to reset the semaphores, but
377 * otherwise we don't wait on them. */
378 }
379 return VK_SUCCESS;
380 }