src/intel/vulkan/anv_android.c
/*
 * Copyright © 2017, Google Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <hardware/gralloc.h>

#if ANDROID_API_LEVEL >= 26
#include <hardware/gralloc1.h>
#endif

#include <hardware/hardware.h>
#include <hardware/hwvulkan.h>
#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <sync/sync.h>

#include "anv_private.h"
#include "vk_format_info.h"
#include "vk_util.h"

static int anv_hal_open(const struct hw_module_t* mod, const char* id,
                        struct hw_device_t** dev);
static int anv_hal_close(struct hw_device_t *dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}
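
/* HWVULKAN_DISPATCH_MAGIC is the value Android's hwvulkan HAL expects in the
 * dispatch word at the start of every dispatchable object, and
 * ICD_LOADER_MAGIC is the value the common Vulkan loader uses for that same
 * word. The assert above checks that the two ABIs agree, so the driver can
 * hand out one set of dispatchable objects to both consumers.
 */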

PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Intel Vulkan HAL",
      .author = "Intel",
      .methods = &(hw_module_methods_t) {
         .open = anv_hal_open,
      },
   },
};

/* If any bits in test_mask are set, then unset them and return true. */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   uint32_t orig_mask = *inout_mask;
   *inout_mask &= ~test_mask;
   return *inout_mask != orig_mask;
}
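
/* A hypothetical illustration of unmask32() (not called anywhere): given
 *
 *    uint32_t m = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT;
 *
 * unmask32(&m, VK_IMAGE_USAGE_SAMPLED_BIT) returns true and leaves only
 * VK_IMAGE_USAGE_STORAGE_BIT set, while unmask32(&m,
 * VK_IMAGE_USAGE_TRANSFER_DST_BIT) returns false and leaves m unchanged.
 * setup_gralloc0_usage() below uses this to consume usage bits as it
 * translates them into gralloc usage flags.
 */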

static int
anv_hal_open(const struct hw_module_t* mod, const char* id,
             struct hw_device_t** dev)
{
   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
   if (!hal_dev)
      return -1;

   *hal_dev = (hwvulkan_device_t) {
      .common = {
         .tag = HARDWARE_DEVICE_TAG,
         .version = HWVULKAN_DEVICE_API_VERSION_0_1,
         .module = &HAL_MODULE_INFO_SYM.common,
         .close = anv_hal_close,
      },
      .EnumerateInstanceExtensionProperties = anv_EnumerateInstanceExtensionProperties,
      .CreateInstance = anv_CreateInstance,
      .GetInstanceProcAddr = anv_GetInstanceProcAddr,
   };

   *dev = &hal_dev->common;
   return 0;
}

static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}

#if ANDROID_API_LEVEL >= 26
static VkResult
get_ahw_buffer_format_properties(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferFormatPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Get a description of the buffer's contents. */
   AHardwareBuffer_Desc desc;
   AHardwareBuffer_describe(buffer, &desc);

   /* Verify the description. */
   uint64_t gpu_usage =
      AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
      AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
      AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;

   /* "Buffer must be a valid Android hardware buffer object with at least
    * one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags."
    */
   if (!(desc.usage & (gpu_usage)))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* Fill properties fields based on description. */
   VkAndroidHardwareBufferFormatPropertiesANDROID *p = pProperties;

   p->format = vk_format_from_android(desc.format, desc.usage);

   const struct anv_format *anv_format = anv_get_format(p->format);
   p->externalFormat = (uint64_t) (uintptr_t) anv_format;

   /* Default to OPTIMAL tiling, but use LINEAR when the buffer has
    * AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER usage.
    */
   VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

   if (desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER)
      tiling = VK_IMAGE_TILING_LINEAR;

   p->formatFeatures =
      anv_get_image_format_features(&device->info, p->format, anv_format,
                                    tiling);

   /* "Images can be created with an external format even if the Android hardware
    * buffer has a format which has an equivalent Vulkan format to enable
    * consistent handling of images from sources that might use either category
    * of format. However, all images created with an external format are subject
    * to the valid usage requirements associated with external formats, even if
    * the Android hardware buffer’s format has a Vulkan equivalent."
    *
    * "The formatFeatures member *must* include
    * VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT and at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
    */
   p->formatFeatures |=
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;

   /* "Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in
    * the absence of more reliable information obtained through some other
    * means."
    */
   p->samplerYcbcrConversionComponents.r = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.g = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.b = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.a = VK_COMPONENT_SWIZZLE_IDENTITY;

   p->suggestedYcbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601;
   p->suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;

   p->suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
   p->suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;

   return VK_SUCCESS;
}

VkResult
anv_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, dev, device_h);

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_prop =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);

   /* Fill the format properties of the Android hardware buffer. */
   if (format_prop)
      get_ahw_buffer_format_properties(device_h, buffer, format_prop);

   /* NOTE - We only use the first handle, but we do not reject buffers with
    * more than one. This lets us support YUV formats that expose several
    * logical planes which all refer to the same underlying buffer, as is the
    * case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(buffer);
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* All memory types. */
   uint32_t memory_types = (1ull << dev->physical->memory.type_count) - 1;

   pProperties->allocationSize = lseek(dma_buf, 0, SEEK_END);
   pProperties->memoryTypeBits = memory_types;

   return VK_SUCCESS;
}
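
/* Typical application-side flow around these entry points (a sketch only;
 * the variable names below are hypothetical and not part of this driver):
 *
 *    VkAndroidHardwareBufferPropertiesANDROID props = { ... };
 *    vkGetAndroidHardwareBufferPropertiesANDROID(device, ahb, &props);
 *
 *    VkImportAndroidHardwareBufferInfoANDROID import = { ..., .buffer = ahb };
 *    VkMemoryAllocateInfo alloc = { ..., .pNext = &import,
 *                                   .allocationSize = props.allocationSize };
 *    vkAllocateMemory(device, &alloc, NULL, &mem);
 *
 * The import lands in anv_import_ahw_memory() further down, and
 * anv_GetMemoryAndroidHardwareBufferANDROID() below is the matching export
 * path: it returns the AHardwareBuffer backing an allocation made with the
 * ANDROID_HARDWARE_BUFFER external handle type.
 */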

VkResult
anv_GetMemoryAndroidHardwareBufferANDROID(
   VkDevice device_h,
   const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
   struct AHardwareBuffer **pBuffer)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, pInfo->memory);

   /* Some quotes from Vulkan spec:
    *
    * "If the device memory was created by importing an Android hardware
    * buffer, vkGetMemoryAndroidHardwareBufferANDROID must return that same
    * Android hardware buffer object."
    *
    * "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must
    * have been included in VkExportMemoryAllocateInfo::handleTypes when
    * memory was created."
    */
   if (mem->ahw) {
      *pBuffer = mem->ahw;
      /* Increase refcount. */
      AHardwareBuffer_acquire(mem->ahw);
      return VK_SUCCESS;
   }

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}

#endif

/* Construct an AHardwareBuffer usage mask from the image usage bits; see
 * 'AHardwareBuffer Usage Equivalence' in the Vulkan spec.
 */
uint64_t
anv_ahw_usage_from_vk_usage(const VkImageCreateFlags vk_create,
                            const VkImageUsageFlags vk_usage)
{
   uint64_t ahw_usage = 0;
#if ANDROID_API_LEVEL >= 26
   if (vk_usage & VK_IMAGE_USAGE_SAMPLED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;

   if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* No usage bits set - set at least one GPU usage. */
   if (ahw_usage == 0)
      ahw_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
#endif
   return ahw_usage;
}
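
/* For example, an image created with VK_IMAGE_USAGE_SAMPLED_BIT |
 * VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT and VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
 * maps to AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
 * AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT | AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP.
 */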

/*
 * Called from anv_AllocateMemory when importing an AHardwareBuffer.
 */
VkResult
anv_import_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkImportAndroidHardwareBufferInfoANDROID *info)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Import from AHardwareBuffer to anv_device_memory. */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(info->buffer);

   /* NOTE - We only use the first handle, but we do not reject buffers with
    * more than one. This lets us support YUV formats that expose several
    * logical planes which all refer to the same underlying buffer, as is the
    * case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   VkResult result = anv_device_import_bo(device, dma_buf, 0,
                                          0 /* client_address */,
                                          &mem->bo);
   assert(result == VK_SUCCESS);

   /* "If the vkAllocateMemory command succeeds, the implementation must
    * acquire a reference to the imported hardware buffer, which it must
    * release when the device memory object is freed. If the command fails,
    * the implementation must not retain a reference."
    */
   AHardwareBuffer_acquire(info->buffer);
   mem->ahw = info->buffer;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

VkResult
anv_create_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkMemoryAllocateInfo *pAllocateInfo)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, dev, device_h);

   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(pAllocateInfo->pNext,
                           MEMORY_DEDICATED_ALLOCATE_INFO);

   uint32_t w = 0;
   uint32_t h = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;

   /* If the caller passed dedicated allocation info, describe the
    * AHardwareBuffer to match the dedicated image or buffer.
    */
   if (dedicated_info && dedicated_info->image) {
      ANV_FROM_HANDLE(anv_image, image, dedicated_info->image);
      w = image->extent.width;
      h = image->extent.height;
      layers = image->array_size;
      format = android_format_from_vk(image->vk_format);
      usage = anv_ahw_usage_from_vk_usage(image->create_flags, image->usage);
   } else if (dedicated_info && dedicated_info->buffer) {
      ANV_FROM_HANDLE(anv_buffer, buffer, dedicated_info->buffer);
      w = buffer->size;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   } else {
      w = pAllocateInfo->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   }

   struct AHardwareBuffer *ahw = NULL;
   struct AHardwareBuffer_Desc desc = {
      .width = w,
      .height = h,
      .layers = layers,
      .format = format,
      .usage = usage,
   };

   if (AHardwareBuffer_allocate(&desc, &ahw) != 0)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   mem->ahw = ahw;
   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

VkResult
anv_image_from_external(
   VkDevice device_h,
   const VkImageCreateInfo *base_info,
   const VkExternalMemoryImageCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   VkImage *out_image_h)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(base_info->pNext, EXTERNAL_FORMAT_ANDROID);

   if (ext_info && ext_info->externalFormat != 0) {
      assert(base_info->format == VK_FORMAT_UNDEFINED);
      assert(base_info->imageType == VK_IMAGE_TYPE_2D);
      assert(base_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
      assert(base_info->tiling == VK_IMAGE_TILING_OPTIMAL);
   }

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
      .external_format = true,
   };

   VkImage image_h;
   VkResult result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   if (result != VK_SUCCESS)
      return result;

   *out_image_h = image_h;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

VkResult
anv_image_from_gralloc(VkDevice device_h,
                       const VkImageCreateInfo *base_info,
                       const VkNativeBufferANDROID *gralloc_info,
                       const VkAllocationCallbacks *alloc,
                       VkImage *out_image_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkImage image_h = VK_NULL_HANDLE;
   struct anv_image *image = NULL;
   struct anv_bo *bo = NULL;
   VkResult result;

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
   };

   if (gralloc_info->handle->numFds != 1) {
      return vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                       "VkNativeBufferANDROID::handle::numFds is %d, "
                       "expected 1", gralloc_info->handle->numFds);
   }

   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    */
   result = anv_device_import_bo(device, dma_buf,
                                 ANV_BO_ALLOC_IMPLICIT_SYNC |
                                 ANV_BO_ALLOC_IMPLICIT_WRITE,
                                 0 /* client_address */,
                                 &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   int i915_tiling = anv_gem_get_tiling(device, bo->gem_handle);
   switch (i915_tiling) {
   case I915_TILING_NONE:
      anv_info.isl_tiling_flags = ISL_TILING_LINEAR_BIT;
      break;
   case I915_TILING_X:
      anv_info.isl_tiling_flags = ISL_TILING_X_BIT;
      break;
   case I915_TILING_Y:
      anv_info.isl_tiling_flags = ISL_TILING_Y0_BIT;
      break;
   case -1:
      result = vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING failed for "
                         "VkNativeBufferANDROID");
      goto fail_tiling;
   default:
      result = vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
                         "tiling %d for VkNativeBufferANDROID", i915_tiling);
      goto fail_tiling;
   }

   enum isl_format format = anv_get_isl_format(&device->info,
                                               base_info->format,
                                               VK_IMAGE_ASPECT_COLOR_BIT,
                                               base_info->tiling);
   assert(format != ISL_FORMAT_UNSUPPORTED);

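   /* gralloc reports the stride in pixels; anv_image_create_info::stride is
    * a row pitch in bytes, so scale by the format's bytes per block (for
    * these non-compressed window-system formats, one block is one pixel).
    */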
   anv_info.stride = gralloc_info->stride *
                     (isl_format_get_layout(format)->bpb / 8);

   result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   image = anv_image_from_handle(image_h);
   if (result != VK_SUCCESS)
      goto fail_create;

   if (bo->size < image->size) {
      result = vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, image->size);
      goto fail_size;
   }

   assert(image->n_planes == 1);
   assert(image->planes[0].address.offset == 0);

   image->planes[0].address.bo = bo;
   image->planes[0].bo_is_owned = true;

   /* Don't clobber the out-parameter until success is certain. */
   *out_image_h = image_h;

   return VK_SUCCESS;

 fail_size:
   anv_DestroyImage(device_h, image_h, alloc);
 fail_create:
 fail_tiling:
   anv_device_release_bo(device, bo);

   return result;
}

static VkResult
format_supported_with_usage(VkDevice device_h, VkFormat format,
                            VkImageUsageFlags imageUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(device->physical);
   VkResult result;

   const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
      .format = format,
      .type = VK_IMAGE_TYPE_2D,
      .tiling = VK_IMAGE_TILING_OPTIMAL,
      .usage = imageUsage,
   };

   VkImageFormatProperties2 image_format_props = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
   };

   /* Check that requested format and usage are supported. */
   result = anv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h,
               &image_format_info, &image_format_props);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, device, result,
                       "anv_GetPhysicalDeviceImageFormatProperties2 failed "
                       "inside %s", __func__);
   }
   return VK_SUCCESS;
}

static VkResult
setup_gralloc0_usage(struct anv_device *device, VkFormat format,
                     VkImageUsageFlags imageUsage, int *grallocUsage)
{
   /* WARNING: Android's libvulkan.so hardcodes the VkImageUsageFlags
    * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
    * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
    *
    *     TODO(jessehall): I think these are right, but haven't thought hard
    *     about it. Do we need to query the driver for support of any of
    *     these?
    *
    * Any disagreement between this function and the hardcoded
    * VkSurfaceCapabilitiesKHR::supportedUsageFlags causes tests
    * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
    */

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                             VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_RENDER;

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                             VK_IMAGE_USAGE_SAMPLED_BIT |
                             VK_IMAGE_USAGE_STORAGE_BIT |
                             VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;

   /* All VkImageUsageFlags not explicitly checked here are unsupported for
    * gralloc swapchains.
    */
   if (imageUsage != 0) {
      return vk_errorf(device, device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "unsupported VkImageUsageFlags(0x%x) for gralloc "
                       "swapchain", imageUsage);
   }

   /* The formats below support GRALLOC_USAGE_HW_FB (that is, display
    * scanout). This short list of formats is universally supported on Intel
    * hardware but is incomplete; the full set of supported formats depends
    * on the kernel and the hardware.
    *
    * FINISHME: Advertise all display-supported formats.
    */
   switch (format) {
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      *grallocUsage |= GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP;
      break;
   default:
      intel_logw("%s: unsupported format=%d", __func__, format);
   }

   if (*grallocUsage == 0)
      return VK_ERROR_FORMAT_NOT_SUPPORTED;

   return VK_SUCCESS;
}
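
/* Worked example: a swapchain image requested with
 * VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT in
 * VK_FORMAT_B8G8R8A8_UNORM ends up with gralloc0 usage
 * GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_FB |
 * GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_EXTERNAL_DISP.
 */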

#if ANDROID_API_LEVEL >= 26
VkResult anv_GetSwapchainGrallocUsage2ANDROID(
    VkDevice device_h,
    VkFormat format,
    VkImageUsageFlags imageUsage,
    VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
    uint64_t* grallocConsumerUsage,
    uint64_t* grallocProducerUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;
   intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   int32_t grallocUsage = 0;
   result = setup_gralloc0_usage(device, format, imageUsage, &grallocUsage);
   if (result != VK_SUCCESS)
      return result;

   /* Set up the gralloc1 usage flags from the gralloc0 flags. */

   if (grallocUsage & GRALLOC_USAGE_HW_RENDER) {
      *grallocProducerUsage |= GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET;
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET;
   }

   if (grallocUsage & GRALLOC_USAGE_HW_TEXTURE) {
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE;
   }

   if (grallocUsage & (GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP)) {
      *grallocProducerUsage |= GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET;
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_HWCOMPOSER;
   }

   return VK_SUCCESS;
}
#endif

VkResult anv_GetSwapchainGrallocUsageANDROID(
    VkDevice device_h,
    VkFormat format,
    VkImageUsageFlags imageUsage,
    int* grallocUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocUsage = 0;
   intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   return setup_gralloc0_usage(device, format, imageUsage, grallocUsage);
}

VkResult
anv_AcquireImageANDROID(
    VkDevice device_h,
    VkImage image_h,
    int nativeFenceFd,
    VkSemaphore semaphore_h,
    VkFence fence_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result = VK_SUCCESS;

   if (nativeFenceFd != -1) {
      /* As a simple, first-pass implementation of VK_ANDROID_native_buffer,
       * we block on the nativeFenceFd. This may introduce latency and is
       * definitely inefficient, but it is correct.
       *
       * FINISHME(chadv): Import the nativeFenceFd into the VkSemaphore and
       * VkFence.
       */
      if (sync_wait(nativeFenceFd, /*timeout*/ -1) < 0) {
         result = vk_errorf(device, device, VK_ERROR_DEVICE_LOST,
                            "%s: failed to wait on nativeFenceFd=%d",
                            __func__, nativeFenceFd);
      }

      /* From VK_ANDROID_native_buffer's pseudo spec
       * (https://source.android.com/devices/graphics/implement-vulkan):
       *
       *    The driver takes ownership of the fence fd and is responsible for
       *    closing it [...] even if vkAcquireImageANDROID fails and returns
       *    an error.
       */
      close(nativeFenceFd);

      if (result != VK_SUCCESS)
         return result;
   }

   if (semaphore_h || fence_h) {
      /* Thanks to implicit sync, the image is ready for GPU access. But we
       * must still put the semaphore into the "submit" state; otherwise the
       * client may get unexpected behavior if the client later uses it as
       * a wait semaphore.
       *
       * Because we blocked above on the nativeFenceFd, the image is also
       * ready for foreign-device access (including CPU access). But we must
       * still signal the fence; otherwise the client may get unexpected
       * behavior if the client later waits on it.
       *
       * For some values of anv_semaphore_type, we must submit the semaphore
       * to execbuf in order to signal it. Likewise for anv_fence_type.
       * Instead of open-coding here the signal operation for each
       * anv_semaphore_type and anv_fence_type, we piggy-back on
       * vkQueueSubmit.
       */
      const VkSubmitInfo submit = {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = 0,
         .commandBufferCount = 0,
         .signalSemaphoreCount = (semaphore_h ? 1 : 0),
         .pSignalSemaphores = &semaphore_h,
      };

      result = anv_QueueSubmit(anv_queue_to_handle(&device->queue), 1,
                               &submit, fence_h);
      if (result != VK_SUCCESS) {
         return vk_errorf(device, device, result,
                          "anv_QueueSubmit failed inside %s", __func__);
      }
   }

   return VK_SUCCESS;
}

VkResult
anv_QueueSignalReleaseImageANDROID(
    VkQueue queue,
    uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores,
    VkImage image,
    int* pNativeFenceFd)
{
   VkResult result;

   if (waitSemaphoreCount == 0)
      goto done;

   result = anv_QueueSubmit(queue, 1,
                            &(VkSubmitInfo) {
                               .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                               .waitSemaphoreCount = waitSemaphoreCount,
                               .pWaitSemaphores = pWaitSemaphores,
                            },
                            (VkFence) VK_NULL_HANDLE);
   if (result != VK_SUCCESS)
      return result;

 done:
   if (pNativeFenceFd) {
      /* We can rely on implicit sync here because above we submitted all of
       * the wait semaphores to the queue.
       */
      *pNativeFenceFd = -1;
   }

   return VK_SUCCESS;
}