anv: Add a ralloc context to anv_pipeline
[mesa.git] / src / intel / vulkan / anv_android.c
1 /*
2 * Copyright © 2017, Google Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <hardware/gralloc.h>
25
26 #if ANDROID_API_LEVEL >= 26
27 #include <hardware/gralloc1.h>
28 #include <grallocusage/GrallocUsageConversion.h>
29 #endif
30
31 #include <hardware/hardware.h>
32 #include <hardware/hwvulkan.h>
33 #include <vulkan/vk_android_native_buffer.h>
34 #include <vulkan/vk_icd.h>
35 #include <sync/sync.h>
36
37 #include "anv_private.h"
38 #include "vk_format_info.h"
39 #include "vk_util.h"
40
41 static int anv_hal_open(const struct hw_module_t* mod, const char* id, struct hw_device_t** dev);
42 static int anv_hal_close(struct hw_device_t *dev);
43
/* Compile-time check that the Android hwvulkan dispatch magic equals the
 * Vulkan loader's ICD magic, so dispatchable handles can be consumed by
 * either side. (Wrapped in a dummy function because STATIC_ASSERT needs
 * function scope.)
 */
static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}
49
/* Android HAL module descriptor. The Android loader dlopens this library
 * and looks up this well-known exported symbol (HAL_MODULE_INFO_SYM) to
 * discover the Vulkan HAL; anv_hal_open() is the entry point it calls to
 * instantiate the device.
 */
PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Intel Vulkan HAL",
      .author = "Intel",
      .methods = &(hw_module_methods_t) {
         .open = anv_hal_open,
      },
   },
};
63
/* Clear every bit of test_mask from *inout_mask and report whether any of
 * those bits had actually been set.
 */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   const uint32_t before = *inout_mask;
   *inout_mask = before & ~test_mask;
   return *inout_mask != before;
}
72
73 static int
74 anv_hal_open(const struct hw_module_t* mod, const char* id,
75 struct hw_device_t** dev)
76 {
77 assert(mod == &HAL_MODULE_INFO_SYM.common);
78 assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);
79
80 hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
81 if (!hal_dev)
82 return -1;
83
84 *hal_dev = (hwvulkan_device_t) {
85 .common = {
86 .tag = HARDWARE_DEVICE_TAG,
87 .version = HWVULKAN_DEVICE_API_VERSION_0_1,
88 .module = &HAL_MODULE_INFO_SYM.common,
89 .close = anv_hal_close,
90 },
91 .EnumerateInstanceExtensionProperties = anv_EnumerateInstanceExtensionProperties,
92 .CreateInstance = anv_CreateInstance,
93 .GetInstanceProcAddr = anv_GetInstanceProcAddr,
94 };
95
96 *dev = &hal_dev->common;
97 return 0;
98 }
99
static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   /* Return an error so any unexpected call is at least visible. */
   return -1;
}
106
107 #if ANDROID_API_LEVEL >= 26
/* Fill out VkAndroidHardwareBufferFormatPropertiesANDROID by querying the
 * AHardwareBuffer's own description.
 *
 * Returns VK_ERROR_INVALID_EXTERNAL_HANDLE if the buffer has no
 * AHARDWAREBUFFER_USAGE_GPU_* usage flag, VK_SUCCESS otherwise.
 */
static VkResult
get_ahw_buffer_format_properties(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferFormatPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Get a description of buffer contents. */
   AHardwareBuffer_Desc desc;
   AHardwareBuffer_describe(buffer, &desc);

   /* Verify description. */
   uint64_t gpu_usage =
      AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
      AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
      AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;

   /* "Buffer must be a valid Android hardware buffer object with at least
    * one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags."
    */
   if (!(desc.usage & (gpu_usage)))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* Fill properties fields based on description. */
   VkAndroidHardwareBufferFormatPropertiesANDROID *p = pProperties;

   p->format = vk_format_from_android(desc.format, desc.usage);

   /* externalFormat is an opaque 64-bit value; here we stash the driver's
    * anv_format pointer in it so it can be recovered later.
    */
   const struct anv_format *anv_format = anv_get_format(p->format);
   p->externalFormat = (uint64_t) (uintptr_t) anv_format;

   /* Default to OPTIMAL tiling but set to linear in case
    * of AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER usage.
    */
   VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

   if (desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER)
      tiling = VK_IMAGE_TILING_LINEAR;

   p->formatFeatures =
      anv_get_image_format_features(&device->info, p->format, anv_format,
                                    tiling);

   /* "Images can be created with an external format even if the Android hardware
    * buffer has a format which has an equivalent Vulkan format to enable
    * consistent handling of images from sources that might use either category
    * of format. However, all images created with an external format are subject
    * to the valid usage requirements associated with external formats, even if
    * the Android hardware buffer’s format has a Vulkan equivalent."
    *
    * "The formatFeatures member *must* include
    * VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT and at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
    */
   p->formatFeatures |=
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;

   /* "Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in
    * the absence of more reliable information obtained through some other
    * means."
    */
   p->samplerYcbcrConversionComponents.r = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.g = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.b = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.a = VK_COMPONENT_SWIZZLE_IDENTITY;

   p->suggestedYcbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601;
   p->suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;

   p->suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
   p->suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;

   return VK_SUCCESS;
}
187
/* vkGetAndroidHardwareBufferPropertiesANDROID: report the allocation size
 * and supported memory types for importing an AHardwareBuffer, plus format
 * properties if the caller chained the format-properties struct.
 */
VkResult
anv_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, dev, device_h);
   struct anv_physical_device *pdevice = &dev->instance->physicalDevice;

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_prop =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);

   /* Fill format properties of an Android hardware buffer. */
   /* NOTE(review): the VkResult of this call is deliberately-looking
    * ignored — confirm whether a failure here should propagate. */
   if (format_prop)
      get_ahw_buffer_format_properties(device_h, buffer, format_prop);

   /* NOTE - We support buffers with only one handle but do not error on
    * multiple handle case. Reason is that we want to support YUV formats
    * where we have many logical planes but they all point to the same
    * buffer, like is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(buffer);
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* All memory types. */
   uint32_t memory_types = (1ull << pdevice->memory.type_count) - 1;

   /* Size of the dma-buf is discovered by seeking to its end. NOTE(review):
    * lseek() can return -1 on error, which would be stored unchecked. */
   pProperties->allocationSize = lseek(dma_buf, 0, SEEK_END);
   pProperties->memoryTypeBits = memory_types;

   return VK_SUCCESS;
}
224
225 VkResult
226 anv_GetMemoryAndroidHardwareBufferANDROID(
227 VkDevice device_h,
228 const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
229 struct AHardwareBuffer **pBuffer)
230 {
231 ANV_FROM_HANDLE(anv_device_memory, mem, pInfo->memory);
232
233 /* Some quotes from Vulkan spec:
234 *
235 * "If the device memory was created by importing an Android hardware
236 * buffer, vkGetMemoryAndroidHardwareBufferANDROID must return that same
237 * Android hardware buffer object."
238 *
239 * "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must
240 * have been included in VkExportMemoryAllocateInfo::handleTypes when
241 * memory was created."
242 */
243 if (mem->ahw) {
244 *pBuffer = mem->ahw;
245 /* Increase refcount. */
246 AHardwareBuffer_acquire(mem->ahw);
247 return VK_SUCCESS;
248 }
249
250 return VK_ERROR_OUT_OF_HOST_MEMORY;
251 }
252
253 #endif
254
255 /* Construct ahw usage mask from image usage bits, see
256 * 'AHardwareBuffer Usage Equivalence' in Vulkan spec.
257 */
258 uint64_t
259 anv_ahw_usage_from_vk_usage(const VkImageCreateFlags vk_create,
260 const VkImageUsageFlags vk_usage)
261 {
262 uint64_t ahw_usage = 0;
263 #if ANDROID_API_LEVEL >= 26
264 if (vk_usage & VK_IMAGE_USAGE_SAMPLED_BIT)
265 ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
266
267 if (vk_usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)
268 ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
269
270 if (vk_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
271 ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;
272
273 if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
274 ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;
275
276 if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
277 ahw_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;
278
279 /* No usage bits set - set at least one GPU usage. */
280 if (ahw_usage == 0)
281 ahw_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
282 #endif
283 return ahw_usage;
284 }
285
286 /*
287 * Called from anv_AllocateMemory when import AHardwareBuffer.
288 */
289 VkResult
290 anv_import_ahw_memory(VkDevice device_h,
291 struct anv_device_memory *mem,
292 const VkImportAndroidHardwareBufferInfoANDROID *info)
293 {
294 #if ANDROID_API_LEVEL >= 26
295 ANV_FROM_HANDLE(anv_device, device, device_h);
296
297 /* Import from AHardwareBuffer to anv_device_memory. */
298 const native_handle_t *handle =
299 AHardwareBuffer_getNativeHandle(info->buffer);
300
301 /* NOTE - We support buffers with only one handle but do not error on
302 * multiple handle case. Reason is that we want to support YUV formats
303 * where we have many logical planes but they all point to the same
304 * buffer, like is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
305 */
306 int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
307 if (dma_buf < 0)
308 return VK_ERROR_INVALID_EXTERNAL_HANDLE;
309
310 uint64_t bo_flags = ANV_BO_EXTERNAL;
311 if (device->instance->physicalDevice.supports_48bit_addresses)
312 bo_flags |= EXEC_OBJECT_SUPPORTS_48B_ADDRESS;
313 if (device->instance->physicalDevice.use_softpin)
314 bo_flags |= EXEC_OBJECT_PINNED;
315
316 VkResult result = anv_bo_cache_import(device, &device->bo_cache,
317 dma_buf, bo_flags, &mem->bo);
318 assert(VK_SUCCESS);
319
320 /* "If the vkAllocateMemory command succeeds, the implementation must
321 * acquire a reference to the imported hardware buffer, which it must
322 * release when the device memory object is freed. If the command fails,
323 * the implementation must not retain a reference."
324 */
325 AHardwareBuffer_acquire(info->buffer);
326 mem->ahw = info->buffer;
327
328 return VK_SUCCESS;
329 #else
330 return VK_ERROR_EXTENSION_NOT_PRESENT;
331 #endif
332 }
333
334 VkResult
335 anv_create_ahw_memory(VkDevice device_h,
336 struct anv_device_memory *mem,
337 const VkMemoryAllocateInfo *pAllocateInfo)
338 {
339 #if ANDROID_API_LEVEL >= 26
340 ANV_FROM_HANDLE(anv_device, dev, device_h);
341
342 const VkMemoryDedicatedAllocateInfo *dedicated_info =
343 vk_find_struct_const(pAllocateInfo->pNext,
344 MEMORY_DEDICATED_ALLOCATE_INFO);
345
346 uint32_t w = 0;
347 uint32_t h = 1;
348 uint32_t layers = 1;
349 uint32_t format = 0;
350 uint64_t usage = 0;
351
352 /* If caller passed dedicated information. */
353 if (dedicated_info && dedicated_info->image) {
354 ANV_FROM_HANDLE(anv_image, image, dedicated_info->image);
355 w = image->extent.width;
356 h = image->extent.height;
357 layers = image->array_size;
358 format = android_format_from_vk(image->vk_format);
359 usage = anv_ahw_usage_from_vk_usage(image->create_flags, image->usage);
360 } else if (dedicated_info && dedicated_info->buffer) {
361 ANV_FROM_HANDLE(anv_buffer, buffer, dedicated_info->buffer);
362 w = buffer->size;
363 format = AHARDWAREBUFFER_FORMAT_BLOB;
364 usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
365 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
366 } else {
367 w = pAllocateInfo->allocationSize;
368 format = AHARDWAREBUFFER_FORMAT_BLOB;
369 usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
370 AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
371 }
372
373 struct AHardwareBuffer *ahw = NULL;
374 struct AHardwareBuffer_Desc desc = {
375 .width = w,
376 .height = h,
377 .layers = layers,
378 .format = format,
379 .usage = usage,
380 };
381
382 if (AHardwareBuffer_allocate(&desc, &ahw) != 0)
383 return VK_ERROR_OUT_OF_HOST_MEMORY;
384
385 mem->ahw = ahw;
386 return VK_SUCCESS;
387 #else
388 return VK_ERROR_EXTENSION_NOT_PRESENT;
389 #endif
390
391 }
392
393 VkResult
394 anv_image_from_external(
395 VkDevice device_h,
396 const VkImageCreateInfo *base_info,
397 const struct VkExternalMemoryImageCreateInfo *create_info,
398 const VkAllocationCallbacks *alloc,
399 VkImage *out_image_h)
400 {
401 #if ANDROID_API_LEVEL >= 26
402 ANV_FROM_HANDLE(anv_device, device, device_h);
403
404 const struct VkExternalFormatANDROID *ext_info =
405 vk_find_struct_const(base_info->pNext, EXTERNAL_FORMAT_ANDROID);
406
407 if (ext_info && ext_info->externalFormat != 0) {
408 assert(base_info->format == VK_FORMAT_UNDEFINED);
409 assert(base_info->imageType == VK_IMAGE_TYPE_2D);
410 assert(base_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
411 assert(base_info->tiling == VK_IMAGE_TILING_OPTIMAL);
412 }
413
414 struct anv_image_create_info anv_info = {
415 .vk_info = base_info,
416 .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
417 .external_format = true,
418 };
419
420 VkImage image_h;
421 VkResult result = anv_image_create(device_h, &anv_info, alloc, &image_h);
422 if (result != VK_SUCCESS)
423 return result;
424
425 *out_image_h = image_h;
426
427 return VK_SUCCESS;
428 #else
429 return VK_ERROR_EXTENSION_NOT_PRESENT;
430 #endif
431 }
432
433
/* Create a VkImage backed by a gralloc buffer (VK_ANDROID_native_buffer).
 *
 * Imports the gralloc handle's dma-buf into the BO cache, derives the
 * tiling from the kernel's view of the BO, creates the image, and binds
 * the BO to the image's single plane. On any failure the imported BO is
 * released and an error is returned; *out_image_h is written only on
 * success.
 */
VkResult
anv_image_from_gralloc(VkDevice device_h,
                       const VkImageCreateInfo *base_info,
                       const VkNativeBufferANDROID *gralloc_info,
                       const VkAllocationCallbacks *alloc,
                       VkImage *out_image_h)

{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkImage image_h = VK_NULL_HANDLE;
   struct anv_image *image = NULL;
   struct anv_bo *bo = NULL;
   VkResult result;

   /* Window-system buffers cannot use aux surfaces; the consumer (display,
    * compositor) would not know how to resolve them. */
   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
   };

   if (gralloc_info->handle->numFds != 1) {
      return vk_errorf(device->instance, device,
                       VK_ERROR_INVALID_EXTERNAL_HANDLE,
                       "VkNativeBufferANDROID::handle::numFds is %d, "
                       "expected 1", gralloc_info->handle->numFds);
   }

   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   uint64_t bo_flags = ANV_BO_EXTERNAL;
   if (device->instance->physicalDevice.supports_48bit_addresses)
      bo_flags |= EXEC_OBJECT_SUPPORTS_48B_ADDRESS;
   if (device->instance->physicalDevice.use_softpin)
      bo_flags |= EXEC_OBJECT_PINNED;

   result = anv_bo_cache_import(device, &device->bo_cache, dma_buf, bo_flags, &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device->instance, device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   /* Ask the kernel how the BO is tiled; gralloc does not tell us. */
   int i915_tiling = anv_gem_get_tiling(device, bo->gem_handle);
   switch (i915_tiling) {
   case I915_TILING_NONE:
      anv_info.isl_tiling_flags = ISL_TILING_LINEAR_BIT;
      break;
   case I915_TILING_X:
      anv_info.isl_tiling_flags = ISL_TILING_X_BIT;
      break;
   case I915_TILING_Y:
      anv_info.isl_tiling_flags = ISL_TILING_Y0_BIT;
      break;
   case -1:
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING failed for "
                         "VkNativeBufferANDROID");
      goto fail_tiling;
   default:
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
                         "tiling %d for VkNativeBufferANDROID", i915_tiling);
      goto fail_tiling;
   }

   enum isl_format format = anv_get_isl_format(&device->info,
                                               base_info->format,
                                               VK_IMAGE_ASPECT_COLOR_BIT,
                                               base_info->tiling);
   assert(format != ISL_FORMAT_UNSUPPORTED);

   /* gralloc stride is in pixels; anv_info.stride is in bytes. */
   anv_info.stride = gralloc_info->stride *
                     (isl_format_get_layout(format)->bpb / 8);

   result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   image = anv_image_from_handle(image_h);
   if (result != VK_SUCCESS)
      goto fail_create;

   if (bo->size < image->size) {
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, image->size);
      goto fail_size;
   }

   assert(image->n_planes == 1);
   assert(image->planes[0].address.offset == 0);

   image->planes[0].address.bo = bo;
   image->planes[0].bo_is_owned = true;

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and if the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    *
    */
   bo->flags &= ~EXEC_OBJECT_ASYNC;
   bo->flags |= EXEC_OBJECT_WRITE;

   /* Don't clobber the out-parameter until success is certain. */
   *out_image_h = image_h;

   return VK_SUCCESS;

 fail_size:
   anv_DestroyImage(device_h, image_h, alloc);
 fail_create:
 fail_tiling:
   anv_bo_cache_release(device, &device->bo_cache, bo);

   return result;
}
557
558 VkResult
559 format_supported_with_usage(VkDevice device_h, VkFormat format,
560 VkImageUsageFlags imageUsage)
561 {
562 ANV_FROM_HANDLE(anv_device, device, device_h);
563 struct anv_physical_device *phys_dev = &device->instance->physicalDevice;
564 VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(phys_dev);
565 VkResult result;
566
567 const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
568 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
569 .format = format,
570 .type = VK_IMAGE_TYPE_2D,
571 .tiling = VK_IMAGE_TILING_OPTIMAL,
572 .usage = imageUsage,
573 };
574
575 VkImageFormatProperties2 image_format_props = {
576 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
577 };
578
579 /* Check that requested format and usage are supported. */
580 result = anv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h,
581 &image_format_info, &image_format_props);
582 if (result != VK_SUCCESS) {
583 return vk_errorf(device->instance, device, result,
584 "anv_GetPhysicalDeviceImageFormatProperties2 failed "
585 "inside %s", __func__);
586 }
587 return VK_SUCCESS;
588 }
589
590
591 static VkResult
592 setup_gralloc0_usage(VkFormat format, VkImageUsageFlags imageUsage,
593 int *grallocUsage)
594 {
595 /* WARNING: Android's libvulkan.so hardcodes the VkImageUsageFlags
596 * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
597 * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
598 *
599 * TODO(jessehall): I think these are right, but haven't thought hard
600 * about it. Do we need to query the driver for support of any of
601 * these?
602 *
603 * Any disagreement between this function and the hardcoded
604 * VkSurfaceCapabilitiesKHR:supportedUsageFlags causes tests
605 * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
606 */
607
608 if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
609 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
610 *grallocUsage |= GRALLOC_USAGE_HW_RENDER;
611
612 if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
613 VK_IMAGE_USAGE_SAMPLED_BIT |
614 VK_IMAGE_USAGE_STORAGE_BIT |
615 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
616 *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;
617
618 /* All VkImageUsageFlags not explicitly checked here are unsupported for
619 * gralloc swapchains.
620 */
621 if (imageUsage != 0) {
622 return vk_errorf(device->instance, device, VK_ERROR_FORMAT_NOT_SUPPORTED,
623 "unsupported VkImageUsageFlags(0x%x) for gralloc "
624 "swapchain", imageUsage);
625 }
626
627 /* The below formats support GRALLOC_USAGE_HW_FB (that is, display
628 * scanout). This short list of formats is univserally supported on Intel
629 * but is incomplete. The full set of supported formats is dependent on
630 * kernel and hardware.
631 *
632 * FINISHME: Advertise all display-supported formats.
633 */
634 switch (format) {
635 case VK_FORMAT_B8G8R8A8_UNORM:
636 case VK_FORMAT_B5G6R5_UNORM_PACK16:
637 case VK_FORMAT_R8G8B8A8_UNORM:
638 case VK_FORMAT_R8G8B8A8_SRGB:
639 *grallocUsage |= GRALLOC_USAGE_HW_FB |
640 GRALLOC_USAGE_HW_COMPOSER |
641 GRALLOC_USAGE_EXTERNAL_DISP;
642 break;
643 default:
644 intel_logw("%s: unsupported format=%d", __func__, format);
645 }
646
647 if (*grallocUsage == 0)
648 return VK_ERROR_FORMAT_NOT_SUPPORTED;
649
650 return VK_SUCCESS;
651 }
652
653
654 #if ANDROID_API_LEVEL >= 26
655 VkResult anv_GetSwapchainGrallocUsage2ANDROID(
656 VkDevice device_h,
657 VkFormat format,
658 VkImageUsageFlags imageUsage,
659 VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
660 uint64_t* grallocConsumerUsage,
661 uint64_t* grallocProducerUsage)
662 {
663 ANV_FROM_HANDLE(anv_device, device, device_h);
664 VkResult result;
665
666 *grallocConsumerUsage = 0;
667 *grallocProducerUsage = 0;
668 intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);
669
670 result = format_supported_with_usage(device_h, format, imageUsage);
671 if (result != VK_SUCCESS)
672 return result;
673
674 int32_t grallocUsage = 0;
675 result = setup_gralloc0_usage(format, imageUsage, &grallocUsage);
676 if (result != VK_SUCCESS)
677 return result;
678
679 android_convertGralloc0To1Usage(grallocUsage, grallocProducerUsage,
680 grallocConsumerUsage);
681
682 return VK_SUCCESS;
683 }
684 #endif
685
686 VkResult anv_GetSwapchainGrallocUsageANDROID(
687 VkDevice device_h,
688 VkFormat format,
689 VkImageUsageFlags imageUsage,
690 int* grallocUsage)
691 {
692 ANV_FROM_HANDLE(anv_device, device, device_h);
693 struct anv_physical_device *phys_dev = &device->instance->physicalDevice;
694 VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(phys_dev);
695 VkResult result;
696
697 *grallocUsage = 0;
698 intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);
699
700 result = format_supported_with_usage(device_h, format, imageUsage);
701 if (result != VK_SUCCESS)
702 return result;
703
704 return setup_gralloc0_usage(format, imageUsage, grallocUsage);
705 }
706
/* vkAcquireImageANDROID: make a swapchain image ready for use.
 *
 * Blocks on the native fence fd (if any), closes it, and then signals the
 * provided semaphore and/or fence via an empty queue submission so later
 * waits behave as the client expects.
 */
VkResult
anv_AcquireImageANDROID(
      VkDevice device_h,
      VkImage image_h,
      int nativeFenceFd,
      VkSemaphore semaphore_h,
      VkFence fence_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result = VK_SUCCESS;

   if (nativeFenceFd != -1) {
      /* As a simple, first-pass implementation of VK_ANDROID_native_buffer,
       * we block on the nativeFenceFd. This may introduce latency and is
       * definitely inefficient, yet it's correct.
       *
       * FINISHME(chadv): Import the nativeFenceFd into the VkSemaphore and
       * VkFence.
       */
      if (sync_wait(nativeFenceFd, /*timeout*/ -1) < 0) {
         result = vk_errorf(device->instance, device, VK_ERROR_DEVICE_LOST,
                            "%s: failed to wait on nativeFenceFd=%d",
                            __func__, nativeFenceFd);
      }

      /* From VK_ANDROID_native_buffer's pseudo spec
       * (https://source.android.com/devices/graphics/implement-vulkan):
       *
       *    The driver takes ownership of the fence fd and is responsible for
       *    closing it [...] even if vkAcquireImageANDROID fails and returns
       *    an error.
       */
      close(nativeFenceFd);

      /* Report the wait failure only after the fd has been closed. */
      if (result != VK_SUCCESS)
         return result;
   }

   if (semaphore_h || fence_h) {
      /* Thanks to implicit sync, the image is ready for GPU access. But we
       * must still put the semaphore into the "submit" state; otherwise the
       * client may get unexpected behavior if the client later uses it as
       * a wait semaphore.
       *
       * Because we blocked above on the nativeFenceFd, the image is also
       * ready for foreign-device access (including CPU access). But we must
       * still signal the fence; otherwise the client may get unexpected
       * behavior if the client later waits on it.
       *
       * For some values of anv_semaphore_type, we must submit the semaphore
       * to execbuf in order to signal it. Likewise for anv_fence_type.
       * Instead of open-coding here the signal operation for each
       * anv_semaphore_type and anv_fence_type, we piggy-back on
       * vkQueueSubmit.
       */
      const VkSubmitInfo submit = {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = 0,
         .commandBufferCount = 0,
         .signalSemaphoreCount = (semaphore_h ? 1 : 0),
         .pSignalSemaphores = &semaphore_h,
      };

      result = anv_QueueSubmit(anv_queue_to_handle(&device->queue), 1,
                               &submit, fence_h);
      if (result != VK_SUCCESS) {
         return vk_errorf(device->instance, device, result,
                          "anv_QueueSubmit failed inside %s", __func__);
      }
   }

   return VK_SUCCESS;
}
780
781 VkResult
782 anv_QueueSignalReleaseImageANDROID(
783 VkQueue queue,
784 uint32_t waitSemaphoreCount,
785 const VkSemaphore* pWaitSemaphores,
786 VkImage image,
787 int* pNativeFenceFd)
788 {
789 VkResult result;
790
791 if (waitSemaphoreCount == 0)
792 goto done;
793
794 result = anv_QueueSubmit(queue, 1,
795 &(VkSubmitInfo) {
796 .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
797 .waitSemaphoreCount = 1,
798 .pWaitSemaphores = pWaitSemaphores,
799 },
800 (VkFence) VK_NULL_HANDLE);
801 if (result != VK_SUCCESS)
802 return result;
803
804 done:
805 if (pNativeFenceFd) {
806 /* We can rely implicit on sync because above we submitted all
807 * semaphores to the queue.
808 */
809 *pNativeFenceFd = -1;
810 }
811
812 return VK_SUCCESS;
813 }