/*
 * Copyright © 2017, Google Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
24 #include <hardware/gralloc.h>
26 #if ANDROID_API_LEVEL >= 26
27 #include <hardware/gralloc1.h>
28 #include <grallocusage/GrallocUsageConversion.h>
31 #include <hardware/hardware.h>
32 #include <hardware/hwvulkan.h>
33 #include <vulkan/vk_android_native_buffer.h>
34 #include <vulkan/vk_icd.h>
35 #include <sync/sync.h>
37 #include "anv_private.h"
38 #include "vk_format_info.h"
41 static int anv_hal_open(const struct hw_module_t
* mod
, const char* id
, struct hw_device_t
** dev
);
42 static int anv_hal_close(struct hw_device_t
*dev
);
47 STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC
== ICD_LOADER_MAGIC
);
50 PUBLIC
struct hwvulkan_module_t HAL_MODULE_INFO_SYM
= {
52 .tag
= HARDWARE_MODULE_TAG
,
53 .module_api_version
= HWVULKAN_MODULE_API_VERSION_0_1
,
54 .hal_api_version
= HARDWARE_MAKE_API_VERSION(1, 0),
55 .id
= HWVULKAN_HARDWARE_MODULE_ID
,
56 .name
= "Intel Vulkan HAL",
58 .methods
= &(hw_module_methods_t
) {
/* If any bits in test_mask are set in *inout_mask, clear them and report
 * true; otherwise leave the mask untouched and report false.
 */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   const bool had_bits = (*inout_mask & test_mask) != 0;
   *inout_mask &= ~test_mask;
   return had_bits;
}
74 anv_hal_open(const struct hw_module_t
* mod
, const char* id
,
75 struct hw_device_t
** dev
)
77 assert(mod
== &HAL_MODULE_INFO_SYM
.common
);
78 assert(strcmp(id
, HWVULKAN_DEVICE_0
) == 0);
80 hwvulkan_device_t
*hal_dev
= malloc(sizeof(*hal_dev
));
84 *hal_dev
= (hwvulkan_device_t
) {
86 .tag
= HARDWARE_DEVICE_TAG
,
87 .version
= HWVULKAN_DEVICE_API_VERSION_0_1
,
88 .module
= &HAL_MODULE_INFO_SYM
.common
,
89 .close
= anv_hal_close
,
91 .EnumerateInstanceExtensionProperties
= anv_EnumerateInstanceExtensionProperties
,
92 .CreateInstance
= anv_CreateInstance
,
93 .GetInstanceProcAddr
= anv_GetInstanceProcAddr
,
96 *dev
= &hal_dev
->common
;
/* hw_device_t::close entry point. */
static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}
107 #if ANDROID_API_LEVEL >= 26
109 get_ahw_buffer_format_properties(
111 const struct AHardwareBuffer
*buffer
,
112 VkAndroidHardwareBufferFormatPropertiesANDROID
*pProperties
)
114 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
116 /* Get a description of buffer contents . */
117 AHardwareBuffer_Desc desc
;
118 AHardwareBuffer_describe(buffer
, &desc
);
120 /* Verify description. */
122 AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
|
123 AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT
|
124 AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER
;
126 /* "Buffer must be a valid Android hardware buffer object with at least
127 * one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags."
129 if (!(desc
.usage
& (gpu_usage
)))
130 return VK_ERROR_INVALID_EXTERNAL_HANDLE
;
132 /* Fill properties fields based on description. */
133 VkAndroidHardwareBufferFormatPropertiesANDROID
*p
= pProperties
;
135 p
->format
= vk_format_from_android(desc
.format
, desc
.usage
);
137 const struct anv_format
*anv_format
= anv_get_format(p
->format
);
138 p
->externalFormat
= (uint64_t) (uintptr_t) anv_format
;
140 /* Default to OPTIMAL tiling but set to linear in case
141 * of AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER usage.
143 VkImageTiling tiling
= VK_IMAGE_TILING_OPTIMAL
;
145 if (desc
.usage
& AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER
)
146 tiling
= VK_IMAGE_TILING_LINEAR
;
149 anv_get_image_format_features(&device
->info
, p
->format
, anv_format
,
152 /* "Images can be created with an external format even if the Android hardware
153 * buffer has a format which has an equivalent Vulkan format to enable
154 * consistent handling of images from sources that might use either category
155 * of format. However, all images created with an external format are subject
156 * to the valid usage requirements associated with external formats, even if
157 * the Android hardware buffer’s format has a Vulkan equivalent."
159 * "The formatFeatures member *must* include
160 * VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT and at least one of
161 * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
162 * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
165 VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT
;
167 /* "Implementations may not always be able to determine the color model,
168 * numerical range, or chroma offsets of the image contents, so the values
169 * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
170 * Applications should treat these values as sensible defaults to use in
171 * the absence of more reliable information obtained through some other
174 p
->samplerYcbcrConversionComponents
.r
= VK_COMPONENT_SWIZZLE_IDENTITY
;
175 p
->samplerYcbcrConversionComponents
.g
= VK_COMPONENT_SWIZZLE_IDENTITY
;
176 p
->samplerYcbcrConversionComponents
.b
= VK_COMPONENT_SWIZZLE_IDENTITY
;
177 p
->samplerYcbcrConversionComponents
.a
= VK_COMPONENT_SWIZZLE_IDENTITY
;
179 p
->suggestedYcbcrModel
= VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601
;
180 p
->suggestedYcbcrRange
= VK_SAMPLER_YCBCR_RANGE_ITU_FULL
;
182 p
->suggestedXChromaOffset
= VK_CHROMA_LOCATION_MIDPOINT
;
183 p
->suggestedYChromaOffset
= VK_CHROMA_LOCATION_MIDPOINT
;
189 anv_GetAndroidHardwareBufferPropertiesANDROID(
191 const struct AHardwareBuffer
*buffer
,
192 VkAndroidHardwareBufferPropertiesANDROID
*pProperties
)
194 ANV_FROM_HANDLE(anv_device
, dev
, device_h
);
195 struct anv_physical_device
*pdevice
= &dev
->instance
->physicalDevice
;
197 VkAndroidHardwareBufferFormatPropertiesANDROID
*format_prop
=
198 vk_find_struct(pProperties
->pNext
,
199 ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID
);
201 /* Fill format properties of an Android hardware buffer. */
203 get_ahw_buffer_format_properties(device_h
, buffer
, format_prop
);
205 /* NOTE - We support buffers with only one handle but do not error on
206 * multiple handle case. Reason is that we want to support YUV formats
207 * where we have many logical planes but they all point to the same
208 * buffer, like is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
210 const native_handle_t
*handle
=
211 AHardwareBuffer_getNativeHandle(buffer
);
212 int dma_buf
= (handle
&& handle
->numFds
) ? handle
->data
[0] : -1;
214 return VK_ERROR_INVALID_EXTERNAL_HANDLE
;
216 /* All memory types. */
217 uint32_t memory_types
= (1ull << pdevice
->memory
.type_count
) - 1;
219 pProperties
->allocationSize
= lseek(dma_buf
, 0, SEEK_END
);
220 pProperties
->memoryTypeBits
= memory_types
;
226 anv_GetMemoryAndroidHardwareBufferANDROID(
228 const VkMemoryGetAndroidHardwareBufferInfoANDROID
*pInfo
,
229 struct AHardwareBuffer
**pBuffer
)
231 ANV_FROM_HANDLE(anv_device_memory
, mem
, pInfo
->memory
);
233 /* Some quotes from Vulkan spec:
235 * "If the device memory was created by importing an Android hardware
236 * buffer, vkGetMemoryAndroidHardwareBufferANDROID must return that same
237 * Android hardware buffer object."
239 * "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must
240 * have been included in VkExportMemoryAllocateInfo::handleTypes when
241 * memory was created."
245 /* Increase refcount. */
246 AHardwareBuffer_acquire(mem
->ahw
);
250 return VK_ERROR_OUT_OF_HOST_MEMORY
;
255 /* Construct ahw usage mask from image usage bits, see
256 * 'AHardwareBuffer Usage Equivalence' in Vulkan spec.
259 anv_ahw_usage_from_vk_usage(const VkImageCreateFlags vk_create
,
260 const VkImageUsageFlags vk_usage
)
262 uint64_t ahw_usage
= 0;
263 #if ANDROID_API_LEVEL >= 26
264 if (vk_usage
& VK_IMAGE_USAGE_SAMPLED_BIT
)
265 ahw_usage
|= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
;
267 if (vk_usage
& VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
)
268 ahw_usage
|= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
;
270 if (vk_usage
& VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
)
271 ahw_usage
|= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT
;
273 if (vk_create
& VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
)
274 ahw_usage
|= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP
;
276 if (vk_create
& VK_IMAGE_CREATE_PROTECTED_BIT
)
277 ahw_usage
|= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT
;
279 /* No usage bits set - set at least one GPU usage. */
281 ahw_usage
= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE
;
287 * Called from anv_AllocateMemory when import AHardwareBuffer.
290 anv_import_ahw_memory(VkDevice device_h
,
291 struct anv_device_memory
*mem
,
292 const VkImportAndroidHardwareBufferInfoANDROID
*info
)
294 #if ANDROID_API_LEVEL >= 26
295 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
297 /* Import from AHardwareBuffer to anv_device_memory. */
298 const native_handle_t
*handle
=
299 AHardwareBuffer_getNativeHandle(info
->buffer
);
301 /* NOTE - We support buffers with only one handle but do not error on
302 * multiple handle case. Reason is that we want to support YUV formats
303 * where we have many logical planes but they all point to the same
304 * buffer, like is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
306 int dma_buf
= (handle
&& handle
->numFds
) ? handle
->data
[0] : -1;
308 return VK_ERROR_INVALID_EXTERNAL_HANDLE
;
310 uint64_t bo_flags
= ANV_BO_EXTERNAL
;
311 if (device
->instance
->physicalDevice
.supports_48bit_addresses
)
312 bo_flags
|= EXEC_OBJECT_SUPPORTS_48B_ADDRESS
;
313 if (device
->instance
->physicalDevice
.use_softpin
)
314 bo_flags
|= EXEC_OBJECT_PINNED
;
316 VkResult result
= anv_bo_cache_import(device
, &device
->bo_cache
,
317 dma_buf
, bo_flags
, &mem
->bo
);
320 /* "If the vkAllocateMemory command succeeds, the implementation must
321 * acquire a reference to the imported hardware buffer, which it must
322 * release when the device memory object is freed. If the command fails,
323 * the implementation must not retain a reference."
325 AHardwareBuffer_acquire(info
->buffer
);
326 mem
->ahw
= info
->buffer
;
330 return VK_ERROR_EXTENSION_NOT_PRESENT
;
335 anv_create_ahw_memory(VkDevice device_h
,
336 struct anv_device_memory
*mem
,
337 const VkMemoryAllocateInfo
*pAllocateInfo
)
339 #if ANDROID_API_LEVEL >= 26
340 ANV_FROM_HANDLE(anv_device
, dev
, device_h
);
342 const VkMemoryDedicatedAllocateInfo
*dedicated_info
=
343 vk_find_struct_const(pAllocateInfo
->pNext
,
344 MEMORY_DEDICATED_ALLOCATE_INFO
);
352 /* If caller passed dedicated information. */
353 if (dedicated_info
&& dedicated_info
->image
) {
354 ANV_FROM_HANDLE(anv_image
, image
, dedicated_info
->image
);
355 w
= image
->extent
.width
;
356 h
= image
->extent
.height
;
357 layers
= image
->array_size
;
358 format
= android_format_from_vk(image
->vk_format
);
359 usage
= anv_ahw_usage_from_vk_usage(image
->create_flags
, image
->usage
);
360 } else if (dedicated_info
&& dedicated_info
->buffer
) {
361 ANV_FROM_HANDLE(anv_buffer
, buffer
, dedicated_info
->buffer
);
363 format
= AHARDWAREBUFFER_FORMAT_BLOB
;
364 usage
= AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN
|
365 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN
;
367 w
= pAllocateInfo
->allocationSize
;
368 format
= AHARDWAREBUFFER_FORMAT_BLOB
;
369 usage
= AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN
|
370 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN
;
373 struct AHardwareBuffer
*ahw
= NULL
;
374 struct AHardwareBuffer_Desc desc
= {
382 if (AHardwareBuffer_allocate(&desc
, &ahw
) != 0)
383 return VK_ERROR_OUT_OF_HOST_MEMORY
;
388 return VK_ERROR_EXTENSION_NOT_PRESENT
;
394 anv_image_from_external(
396 const VkImageCreateInfo
*base_info
,
397 const struct VkExternalMemoryImageCreateInfo
*create_info
,
398 const VkAllocationCallbacks
*alloc
,
399 VkImage
*out_image_h
)
401 #if ANDROID_API_LEVEL >= 26
402 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
404 const struct VkExternalFormatANDROID
*ext_info
=
405 vk_find_struct_const(base_info
->pNext
, EXTERNAL_FORMAT_ANDROID
);
407 if (ext_info
&& ext_info
->externalFormat
!= 0) {
408 assert(base_info
->format
== VK_FORMAT_UNDEFINED
);
409 assert(base_info
->imageType
== VK_IMAGE_TYPE_2D
);
410 assert(base_info
->usage
== VK_IMAGE_USAGE_SAMPLED_BIT
);
411 assert(base_info
->tiling
== VK_IMAGE_TILING_OPTIMAL
);
414 struct anv_image_create_info anv_info
= {
415 .vk_info
= base_info
,
416 .isl_extra_usage_flags
= ISL_SURF_USAGE_DISABLE_AUX_BIT
,
417 .external_format
= true,
421 VkResult result
= anv_image_create(device_h
, &anv_info
, alloc
, &image_h
);
422 if (result
!= VK_SUCCESS
)
425 *out_image_h
= image_h
;
429 return VK_ERROR_EXTENSION_NOT_PRESENT
;
435 anv_image_from_gralloc(VkDevice device_h
,
436 const VkImageCreateInfo
*base_info
,
437 const VkNativeBufferANDROID
*gralloc_info
,
438 const VkAllocationCallbacks
*alloc
,
439 VkImage
*out_image_h
)
442 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
443 VkImage image_h
= VK_NULL_HANDLE
;
444 struct anv_image
*image
= NULL
;
445 struct anv_bo
*bo
= NULL
;
448 struct anv_image_create_info anv_info
= {
449 .vk_info
= base_info
,
450 .isl_extra_usage_flags
= ISL_SURF_USAGE_DISABLE_AUX_BIT
,
453 if (gralloc_info
->handle
->numFds
!= 1) {
454 return vk_errorf(device
->instance
, device
,
455 VK_ERROR_INVALID_EXTERNAL_HANDLE
,
456 "VkNativeBufferANDROID::handle::numFds is %d, "
457 "expected 1", gralloc_info
->handle
->numFds
);
460 /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
461 * must exceed that of the gralloc handle, and we do not own the gralloc
464 int dma_buf
= gralloc_info
->handle
->data
[0];
466 uint64_t bo_flags
= ANV_BO_EXTERNAL
;
467 if (device
->instance
->physicalDevice
.supports_48bit_addresses
)
468 bo_flags
|= EXEC_OBJECT_SUPPORTS_48B_ADDRESS
;
469 if (device
->instance
->physicalDevice
.use_softpin
)
470 bo_flags
|= EXEC_OBJECT_PINNED
;
472 result
= anv_bo_cache_import(device
, &device
->bo_cache
, dma_buf
, bo_flags
, &bo
);
473 if (result
!= VK_SUCCESS
) {
474 return vk_errorf(device
->instance
, device
, result
,
475 "failed to import dma-buf from VkNativeBufferANDROID");
478 int i915_tiling
= anv_gem_get_tiling(device
, bo
->gem_handle
);
479 switch (i915_tiling
) {
480 case I915_TILING_NONE
:
481 anv_info
.isl_tiling_flags
= ISL_TILING_LINEAR_BIT
;
484 anv_info
.isl_tiling_flags
= ISL_TILING_X_BIT
;
487 anv_info
.isl_tiling_flags
= ISL_TILING_Y0_BIT
;
490 result
= vk_errorf(device
->instance
, device
,
491 VK_ERROR_INVALID_EXTERNAL_HANDLE
,
492 "DRM_IOCTL_I915_GEM_GET_TILING failed for "
493 "VkNativeBufferANDROID");
496 result
= vk_errorf(device
->instance
, device
,
497 VK_ERROR_INVALID_EXTERNAL_HANDLE
,
498 "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
499 "tiling %d for VkNativeBufferANDROID", i915_tiling
);
503 enum isl_format format
= anv_get_isl_format(&device
->info
,
505 VK_IMAGE_ASPECT_COLOR_BIT
,
507 assert(format
!= ISL_FORMAT_UNSUPPORTED
);
509 anv_info
.stride
= gralloc_info
->stride
*
510 (isl_format_get_layout(format
)->bpb
/ 8);
512 result
= anv_image_create(device_h
, &anv_info
, alloc
, &image_h
);
513 image
= anv_image_from_handle(image_h
);
514 if (result
!= VK_SUCCESS
)
517 if (bo
->size
< image
->size
) {
518 result
= vk_errorf(device
->instance
, device
,
519 VK_ERROR_INVALID_EXTERNAL_HANDLE
,
520 "dma-buf from VkNativeBufferANDROID is too small for "
521 "VkImage: %"PRIu64
"B < %"PRIu64
"B",
522 bo
->size
, image
->size
);
526 assert(image
->n_planes
== 1);
527 assert(image
->planes
[0].address
.offset
== 0);
529 image
->planes
[0].address
.bo
= bo
;
530 image
->planes
[0].bo_is_owned
= true;
532 /* We need to set the WRITE flag on window system buffers so that GEM will
533 * know we're writing to them and synchronize uses on other rings (for
534 * example, if the display server uses the blitter ring).
536 * If this function fails and if the imported bo was resident in the cache,
537 * we should avoid updating the bo's flags. Therefore, we defer updating
538 * the flags until success is certain.
541 bo
->flags
&= ~EXEC_OBJECT_ASYNC
;
542 bo
->flags
|= EXEC_OBJECT_WRITE
;
544 /* Don't clobber the out-parameter until success is certain. */
545 *out_image_h
= image_h
;
550 anv_DestroyImage(device_h
, image_h
, alloc
);
553 anv_bo_cache_release(device
, &device
->bo_cache
, bo
);
559 format_supported_with_usage(VkDevice device_h
, VkFormat format
,
560 VkImageUsageFlags imageUsage
)
562 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
563 struct anv_physical_device
*phys_dev
= &device
->instance
->physicalDevice
;
564 VkPhysicalDevice phys_dev_h
= anv_physical_device_to_handle(phys_dev
);
567 const VkPhysicalDeviceImageFormatInfo2 image_format_info
= {
568 .sType
= VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2
,
570 .type
= VK_IMAGE_TYPE_2D
,
571 .tiling
= VK_IMAGE_TILING_OPTIMAL
,
575 VkImageFormatProperties2 image_format_props
= {
576 .sType
= VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2
,
579 /* Check that requested format and usage are supported. */
580 result
= anv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h
,
581 &image_format_info
, &image_format_props
);
582 if (result
!= VK_SUCCESS
) {
583 return vk_errorf(device
->instance
, device
, result
,
584 "anv_GetPhysicalDeviceImageFormatProperties2 failed "
585 "inside %s", __func__
);
592 setup_gralloc0_usage(VkFormat format
, VkImageUsageFlags imageUsage
,
595 /* WARNING: Android's libvulkan.so hardcodes the VkImageUsageFlags
596 * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
597 * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
599 * TODO(jessehall): I think these are right, but haven't thought hard
600 * about it. Do we need to query the driver for support of any of
603 * Any disagreement between this function and the hardcoded
604 * VkSurfaceCapabilitiesKHR:supportedUsageFlags causes tests
605 * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
608 if (unmask32(&imageUsage
, VK_IMAGE_USAGE_TRANSFER_DST_BIT
|
609 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
))
610 *grallocUsage
|= GRALLOC_USAGE_HW_RENDER
;
612 if (unmask32(&imageUsage
, VK_IMAGE_USAGE_TRANSFER_SRC_BIT
|
613 VK_IMAGE_USAGE_SAMPLED_BIT
|
614 VK_IMAGE_USAGE_STORAGE_BIT
|
615 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
))
616 *grallocUsage
|= GRALLOC_USAGE_HW_TEXTURE
;
618 /* All VkImageUsageFlags not explicitly checked here are unsupported for
619 * gralloc swapchains.
621 if (imageUsage
!= 0) {
622 return vk_errorf(device
->instance
, device
, VK_ERROR_FORMAT_NOT_SUPPORTED
,
623 "unsupported VkImageUsageFlags(0x%x) for gralloc "
624 "swapchain", imageUsage
);
627 /* The below formats support GRALLOC_USAGE_HW_FB (that is, display
628 * scanout). This short list of formats is univserally supported on Intel
629 * but is incomplete. The full set of supported formats is dependent on
630 * kernel and hardware.
632 * FINISHME: Advertise all display-supported formats.
635 case VK_FORMAT_B8G8R8A8_UNORM
:
636 case VK_FORMAT_B5G6R5_UNORM_PACK16
:
637 case VK_FORMAT_R8G8B8A8_UNORM
:
638 case VK_FORMAT_R8G8B8A8_SRGB
:
639 *grallocUsage
|= GRALLOC_USAGE_HW_FB
|
640 GRALLOC_USAGE_HW_COMPOSER
|
641 GRALLOC_USAGE_EXTERNAL_DISP
;
644 intel_logw("%s: unsupported format=%d", __func__
, format
);
647 if (*grallocUsage
== 0)
648 return VK_ERROR_FORMAT_NOT_SUPPORTED
;
#if ANDROID_API_LEVEL >= 26
/* gralloc1 variant: compute the gralloc0 usage mask, then split it into
 * producer/consumer usage pairs.
 */
VkResult anv_GetSwapchainGrallocUsage2ANDROID(
    VkDevice            device_h,
    VkFormat            format,
    VkImageUsageFlags   imageUsage,
    VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
    uint64_t*           grallocConsumerUsage,
    uint64_t*           grallocProducerUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;
   intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   int32_t grallocUsage = 0;
   result = setup_gralloc0_usage(format, imageUsage, &grallocUsage);
   if (result != VK_SUCCESS)
      return result;

   /* Convert the legacy gralloc0 bits into gralloc1 producer/consumer bits. */
   android_convertGralloc0To1Usage(grallocUsage, grallocProducerUsage,
                                   grallocConsumerUsage);

   return VK_SUCCESS;
}
#endif
686 VkResult
anv_GetSwapchainGrallocUsageANDROID(
689 VkImageUsageFlags imageUsage
,
692 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
693 struct anv_physical_device
*phys_dev
= &device
->instance
->physicalDevice
;
694 VkPhysicalDevice phys_dev_h
= anv_physical_device_to_handle(phys_dev
);
698 intel_logd("%s: format=%d, usage=0x%x", __func__
, format
, imageUsage
);
700 result
= format_supported_with_usage(device_h
, format
, imageUsage
);
701 if (result
!= VK_SUCCESS
)
704 return setup_gralloc0_usage(format
, imageUsage
, grallocUsage
);
708 anv_AcquireImageANDROID(
712 VkSemaphore semaphore_h
,
715 ANV_FROM_HANDLE(anv_device
, device
, device_h
);
716 VkResult result
= VK_SUCCESS
;
718 if (nativeFenceFd
!= -1) {
719 /* As a simple, firstpass implementation of VK_ANDROID_native_buffer, we
720 * block on the nativeFenceFd. This may introduce latency and is
721 * definitiely inefficient, yet it's correct.
723 * FINISHME(chadv): Import the nativeFenceFd into the VkSemaphore and
726 if (sync_wait(nativeFenceFd
, /*timeout*/ -1) < 0) {
727 result
= vk_errorf(device
->instance
, device
, VK_ERROR_DEVICE_LOST
,
728 "%s: failed to wait on nativeFenceFd=%d",
729 __func__
, nativeFenceFd
);
732 /* From VK_ANDROID_native_buffer's pseudo spec
733 * (https://source.android.com/devices/graphics/implement-vulkan):
735 * The driver takes ownership of the fence fd and is responsible for
736 * closing it [...] even if vkAcquireImageANDROID fails and returns
739 close(nativeFenceFd
);
741 if (result
!= VK_SUCCESS
)
745 if (semaphore_h
|| fence_h
) {
746 /* Thanks to implicit sync, the image is ready for GPU access. But we
747 * must still put the semaphore into the "submit" state; otherwise the
748 * client may get unexpected behavior if the client later uses it as
751 * Because we blocked above on the nativeFenceFd, the image is also
752 * ready for foreign-device access (including CPU access). But we must
753 * still signal the fence; otherwise the client may get unexpected
754 * behavior if the client later waits on it.
756 * For some values of anv_semaphore_type, we must submit the semaphore
757 * to execbuf in order to signal it. Likewise for anv_fence_type.
758 * Instead of open-coding here the signal operation for each
759 * anv_semaphore_type and anv_fence_type, we piggy-back on
762 const VkSubmitInfo submit
= {
763 .sType
= VK_STRUCTURE_TYPE_SUBMIT_INFO
,
764 .waitSemaphoreCount
= 0,
765 .commandBufferCount
= 0,
766 .signalSemaphoreCount
= (semaphore_h
? 1 : 0),
767 .pSignalSemaphores
= &semaphore_h
,
770 result
= anv_QueueSubmit(anv_queue_to_handle(&device
->queue
), 1,
772 if (result
!= VK_SUCCESS
) {
773 return vk_errorf(device
->instance
, device
, result
,
774 "anv_QueueSubmit failed inside %s", __func__
);
782 anv_QueueSignalReleaseImageANDROID(
784 uint32_t waitSemaphoreCount
,
785 const VkSemaphore
* pWaitSemaphores
,
791 if (waitSemaphoreCount
== 0)
794 result
= anv_QueueSubmit(queue
, 1,
796 .sType
= VK_STRUCTURE_TYPE_SUBMIT_INFO
,
797 .waitSemaphoreCount
= 1,
798 .pWaitSemaphores
= pWaitSemaphores
,
800 (VkFence
) VK_NULL_HANDLE
);
801 if (result
!= VK_SUCCESS
)
805 if (pNativeFenceFd
) {
806 /* We can rely implicit on sync because above we submitted all
807 * semaphores to the queue.
809 *pNativeFenceFd
= -1;