[mesa.git] src/intel/vulkan/anv_android.c
/*
 * Copyright © 2017, Google Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <hardware/gralloc.h>
#include <hardware/hardware.h>
#include <hardware/hwvulkan.h>
#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <sync/sync.h>

#include "anv_private.h"
#include "vk_format_info.h"
#include "vk_util.h"

static int anv_hal_open(const struct hw_module_t* mod, const char* id, struct hw_device_t** dev);
static int anv_hal_close(struct hw_device_t *dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}

PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Intel Vulkan HAL",
      .author = "Intel",
      .methods = &(hw_module_methods_t) {
         .open = anv_hal_open,
      },
   },
};

/* If any bits in test_mask are set, then unset them and return true. */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   uint32_t orig_mask = *inout_mask;
   *inout_mask &= ~test_mask;
   return *inout_mask != orig_mask;
}

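/* hwvulkan HAL open() entry point. Allocates a hwvulkan_device_t that exposes
 * the three global entry points the Android Vulkan loader requires.
 */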
static int
anv_hal_open(const struct hw_module_t* mod, const char* id,
             struct hw_device_t** dev)
{
   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
   if (!hal_dev)
      return -1;

   *hal_dev = (hwvulkan_device_t) {
      .common = {
         .tag = HARDWARE_DEVICE_TAG,
         .version = HWVULKAN_DEVICE_API_VERSION_0_1,
         .module = &HAL_MODULE_INFO_SYM.common,
         .close = anv_hal_close,
      },
      .EnumerateInstanceExtensionProperties = anv_EnumerateInstanceExtensionProperties,
      .CreateInstance = anv_CreateInstance,
      .GetInstanceProcAddr = anv_GetInstanceProcAddr,
   };

   *dev = &hal_dev->common;
   return 0;
}

static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}

#if ANDROID_API_LEVEL >= 26
static VkResult
get_ahw_buffer_format_properties(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferFormatPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Get a description of buffer contents. */
   AHardwareBuffer_Desc desc;
   AHardwareBuffer_describe(buffer, &desc);

   /* Verify description. */
   uint64_t gpu_usage =
      AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
      AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
      AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;

   /* "Buffer must be a valid Android hardware buffer object with at least
    * one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags."
    */
   if (!(desc.usage & gpu_usage))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* Fill properties fields based on description. */
   VkAndroidHardwareBufferFormatPropertiesANDROID *p = pProperties;

   p->format = vk_format_from_android(desc.format);

   const struct anv_format *anv_format = anv_get_format(p->format);
   p->externalFormat = (uint64_t) (uintptr_t) anv_format;

   /* Default to OPTIMAL tiling but set to linear in case
    * of AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER usage.
    */
   VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

   if (desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER)
      tiling = VK_IMAGE_TILING_LINEAR;

   p->formatFeatures =
      anv_get_image_format_features(&device->info, p->format, anv_format,
                                    tiling);

   /* "Images can be created with an external format even if the Android hardware
    * buffer has a format which has an equivalent Vulkan format to enable
    * consistent handling of images from sources that might use either category
    * of format. However, all images created with an external format are subject
    * to the valid usage requirements associated with external formats, even if
    * the Android hardware buffer's format has a Vulkan equivalent."
    *
    * "The formatFeatures member *must* include
    * VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT and at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
    */
   p->formatFeatures |=
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;

   /* "Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in
    * the absence of more reliable information obtained through some other
    * means."
    */
   p->samplerYcbcrConversionComponents.r = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.g = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.b = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.a = VK_COMPONENT_SWIZZLE_IDENTITY;

   p->suggestedYcbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601;
   p->suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;

   p->suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
   p->suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;

   return VK_SUCCESS;
}

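/* vkGetAndroidHardwareBufferPropertiesANDROID: report the allocation size,
 * usable memory types, and (optionally) the format properties of an imported
 * AHardwareBuffer.
 */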
VkResult
anv_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, dev, device_h);
   struct anv_physical_device *pdevice = &dev->instance->physicalDevice;

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_prop =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);

   /* Fill format properties of an Android hardware buffer. */
   if (format_prop)
      get_ahw_buffer_format_properties(device_h, buffer, format_prop);

   /* NOTE - We support buffers with only one handle, but we do not error on
    * the multiple-handle case. The reason is that we want to support YUV
    * formats where many logical planes all point to the same buffer, as is
    * the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(buffer);
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* All memory types. */
   uint32_t memory_types = (1ull << pdevice->memory.type_count) - 1;

   pProperties->allocationSize = lseek(dma_buf, 0, SEEK_END);
   pProperties->memoryTypeBits = memory_types;

   return VK_SUCCESS;
}

VkResult
anv_GetMemoryAndroidHardwareBufferANDROID(
   VkDevice device_h,
   const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
   struct AHardwareBuffer **pBuffer)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, pInfo->memory);

   /* Some quotes from Vulkan spec:
    *
    * "If the device memory was created by importing an Android hardware
    * buffer, vkGetMemoryAndroidHardwareBufferANDROID must return that same
    * Android hardware buffer object."
    *
    * "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must
    * have been included in VkExportMemoryAllocateInfo::handleTypes when
    * memory was created."
    */
   if (mem->ahw) {
      *pBuffer = mem->ahw;
      /* Increase refcount. */
      AHardwareBuffer_acquire(mem->ahw);
      return VK_SUCCESS;
   }

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}

#endif

/* Construct ahw usage mask from image usage bits, see
 * 'AHardwareBuffer Usage Equivalence' in Vulkan spec.
 */
uint64_t
anv_ahw_usage_from_vk_usage(const VkImageCreateFlags vk_create,
                            const VkImageUsageFlags vk_usage)
{
   uint64_t ahw_usage = 0;
#if ANDROID_API_LEVEL >= 26
   if (vk_usage & VK_IMAGE_USAGE_SAMPLED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;

   if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* No usage bits set - set at least one GPU usage. */
   if (ahw_usage == 0)
      ahw_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
#endif
   return ahw_usage;
}

/*
 * Called from anv_AllocateMemory when importing an AHardwareBuffer.
 */
VkResult
anv_import_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkImportAndroidHardwareBufferInfoANDROID *info)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Import from AHardwareBuffer to anv_device_memory. */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(info->buffer);

   /* NOTE - We support buffers with only one handle, but we do not error on
    * the multiple-handle case. The reason is that we want to support YUV
    * formats where many logical planes all point to the same buffer, as is
    * the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   uint64_t bo_flags = ANV_BO_EXTERNAL;
   if (device->instance->physicalDevice.supports_48bit_addresses)
      bo_flags |= EXEC_OBJECT_SUPPORTS_48B_ADDRESS;
   if (device->instance->physicalDevice.use_softpin)
      bo_flags |= EXEC_OBJECT_PINNED;

   VkResult result = anv_bo_cache_import(device, &device->bo_cache,
                                         dma_buf, bo_flags, &mem->bo);
   if (result != VK_SUCCESS)
      return result;

   /* "If the vkAllocateMemory command succeeds, the implementation must
    * acquire a reference to the imported hardware buffer, which it must
    * release when the device memory object is freed. If the command fails,
    * the implementation must not retain a reference."
    */
   AHardwareBuffer_acquire(info->buffer);
   mem->ahw = info->buffer;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

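/*
 * Counterpart to anv_import_ahw_memory: allocate a fresh AHardwareBuffer to
 * back an exportable VkDeviceMemory. The buffer is sized and formatted from
 * the dedicated image or buffer when one is given, and as a CPU-accessible
 * BLOB otherwise.
 */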
VkResult
anv_create_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkMemoryAllocateInfo *pAllocateInfo)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, dev, device_h);

   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(pAllocateInfo->pNext,
                           MEMORY_DEDICATED_ALLOCATE_INFO);

   uint32_t w = 0;
   uint32_t h = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;

   /* If caller passed dedicated information. */
   if (dedicated_info && dedicated_info->image) {
      ANV_FROM_HANDLE(anv_image, image, dedicated_info->image);
      w = image->extent.width;
      h = image->extent.height;
      layers = image->array_size;
      format = android_format_from_vk(image->vk_format);
      usage = anv_ahw_usage_from_vk_usage(image->create_flags, image->usage);
   } else if (dedicated_info && dedicated_info->buffer) {
      ANV_FROM_HANDLE(anv_buffer, buffer, dedicated_info->buffer);
      w = buffer->size;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   } else {
      w = pAllocateInfo->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   }

   struct AHardwareBuffer *ahw = NULL;
   struct AHardwareBuffer_Desc desc = {
      .width = w,
      .height = h,
      .layers = layers,
      .format = format,
      .usage = usage,
   };

   if (AHardwareBuffer_allocate(&desc, &ahw) != 0)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   mem->ahw = ahw;
   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

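/*
 * Create a VkImage intended to be bound to external (e.g. AHardwareBuffer)
 * memory. Images created with a non-zero VkExternalFormatANDROID are limited
 * to sampled, 2D, optimal-tiling usage, as the asserts below verify.
 */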
VkResult
anv_image_from_external(
   VkDevice device_h,
   const VkImageCreateInfo *base_info,
   const struct VkExternalMemoryImageCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   VkImage *out_image_h)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   const struct VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(base_info->pNext, EXTERNAL_FORMAT_ANDROID);

   if (ext_info && ext_info->externalFormat != 0) {
      assert(base_info->format == VK_FORMAT_UNDEFINED);
      assert(base_info->imageType == VK_IMAGE_TYPE_2D);
      assert(base_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
      assert(base_info->tiling == VK_IMAGE_TILING_OPTIMAL);
   }

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
      .external_format = true,
   };

   VkImage image_h;
   VkResult result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   if (result != VK_SUCCESS)
      return result;

   *out_image_h = image_h;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

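/*
 * Create a VkImage backed by the dma-buf in a VkNativeBufferANDROID (gralloc)
 * handle. The isl tiling is derived from the kernel's GET_TILING ioctl, and
 * the bo is marked for implicit write synchronization.
 */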
VkResult
anv_image_from_gralloc(VkDevice device_h,
                       const VkImageCreateInfo *base_info,
                       const VkNativeBufferANDROID *gralloc_info,
                       const VkAllocationCallbacks *alloc,
                       VkImage *out_image_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkImage image_h = VK_NULL_HANDLE;
   struct anv_image *image = NULL;
   struct anv_bo *bo = NULL;
   VkResult result;

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
   };

   if (gralloc_info->handle->numFds != 1) {
      return vk_errorf(device->instance, device,
                       VK_ERROR_INVALID_EXTERNAL_HANDLE,
                       "VkNativeBufferANDROID::handle::numFds is %d, "
                       "expected 1", gralloc_info->handle->numFds);
   }

   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   uint64_t bo_flags = ANV_BO_EXTERNAL;
   if (device->instance->physicalDevice.supports_48bit_addresses)
      bo_flags |= EXEC_OBJECT_SUPPORTS_48B_ADDRESS;
   if (device->instance->physicalDevice.use_softpin)
      bo_flags |= EXEC_OBJECT_PINNED;

   result = anv_bo_cache_import(device, &device->bo_cache, dma_buf, bo_flags, &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device->instance, device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   int i915_tiling = anv_gem_get_tiling(device, bo->gem_handle);
   switch (i915_tiling) {
   case I915_TILING_NONE:
      anv_info.isl_tiling_flags = ISL_TILING_LINEAR_BIT;
      break;
   case I915_TILING_X:
      anv_info.isl_tiling_flags = ISL_TILING_X_BIT;
      break;
   case I915_TILING_Y:
      anv_info.isl_tiling_flags = ISL_TILING_Y0_BIT;
      break;
   case -1:
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING failed for "
                         "VkNativeBufferANDROID");
      goto fail_tiling;
   default:
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
                         "tiling %d for VkNativeBufferANDROID", i915_tiling);
      goto fail_tiling;
   }

   enum isl_format format = anv_get_isl_format(&device->info,
                                               base_info->format,
                                               VK_IMAGE_ASPECT_COLOR_BIT,
                                               base_info->tiling);
   assert(format != ISL_FORMAT_UNSUPPORTED);

   anv_info.stride = gralloc_info->stride *
                     (isl_format_get_layout(format)->bpb / 8);

   result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   image = anv_image_from_handle(image_h);
   if (result != VK_SUCCESS)
      goto fail_create;

   if (bo->size < image->size) {
      result = vk_errorf(device->instance, device,
                         VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, image->size);
      goto fail_size;
   }

   assert(image->n_planes == 1);
   assert(image->planes[0].address.offset == 0);

   image->planes[0].address.bo = bo;
   image->planes[0].bo_is_owned = true;

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and if the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    */
   bo->flags &= ~EXEC_OBJECT_ASYNC;
   bo->flags |= EXEC_OBJECT_WRITE;

   /* Don't clobber the out-parameter until success is certain. */
   *out_image_h = image_h;

   return VK_SUCCESS;

 fail_size:
   anv_DestroyImage(device_h, image_h, alloc);
 fail_create:
 fail_tiling:
   anv_bo_cache_release(device, &device->bo_cache, bo);

   return result;
}

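/*
 * VK_ANDROID_native_buffer: translate the VkImageUsageFlags requested for
 * swapchain images into the gralloc usage bits the Android platform expects.
 */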
VkResult anv_GetSwapchainGrallocUsageANDROID(
    VkDevice device_h,
    VkFormat format,
    VkImageUsageFlags imageUsage,
    int* grallocUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   struct anv_physical_device *phys_dev = &device->instance->physicalDevice;
   VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(phys_dev);
   VkResult result;

   *grallocUsage = 0;
   intel_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   /* WARNING: Android's libvulkan.so hardcodes the VkImageUsageFlags
    * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
    * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
    *
    *     TODO(jessehall): I think these are right, but haven't thought hard
    *     about it. Do we need to query the driver for support of any of
    *     these?
    *
    * Any disagreement between this function and the hardcoded
    * VkSurfaceCapabilitiesKHR::supportedUsageFlags causes tests
    * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
    */

   const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
      .format = format,
      .type = VK_IMAGE_TYPE_2D,
      .tiling = VK_IMAGE_TILING_OPTIMAL,
      .usage = imageUsage,
   };

   VkImageFormatProperties2 image_format_props = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
   };

   /* Check that requested format and usage are supported. */
   result = anv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h,
               &image_format_info, &image_format_props);
   if (result != VK_SUCCESS) {
      return vk_errorf(device->instance, device, result,
                       "anv_GetPhysicalDeviceImageFormatProperties2 failed "
                       "inside %s", __func__);
   }

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                             VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_RENDER;

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                             VK_IMAGE_USAGE_SAMPLED_BIT |
                             VK_IMAGE_USAGE_STORAGE_BIT |
                             VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;

   /* All VkImageUsageFlags not explicitly checked here are unsupported for
    * gralloc swapchains.
    */
   if (imageUsage != 0) {
      return vk_errorf(device->instance, device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "unsupported VkImageUsageFlags(0x%x) for gralloc "
                       "swapchain", imageUsage);
   }

   /* The below formats support GRALLOC_USAGE_HW_FB (that is, display
    * scanout). This short list of formats is universally supported on Intel
    * but is incomplete. The full set of supported formats is dependent on
    * kernel and hardware.
    *
    * FINISHME: Advertise all display-supported formats.
    */
   switch (format) {
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B5G6R5_UNORM_PACK16:
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      *grallocUsage |= GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP;
      break;
   default:
      intel_logw("%s: unsupported format=%d", __func__, format);
   }

   if (*grallocUsage == 0)
      return VK_ERROR_FORMAT_NOT_SUPPORTED;

   return VK_SUCCESS;
}

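/*
 * VK_ANDROID_native_buffer: make a dequeued swapchain image ready for use by
 * waiting on the incoming native fence fd and then signaling the provided
 * semaphore and fence through an empty queue submission.
 */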
VkResult
anv_AcquireImageANDROID(
    VkDevice device_h,
    VkImage image_h,
    int nativeFenceFd,
    VkSemaphore semaphore_h,
    VkFence fence_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result = VK_SUCCESS;

   if (nativeFenceFd != -1) {
      /* As a simple, first-pass implementation of VK_ANDROID_native_buffer,
       * we block on the nativeFenceFd. This may introduce latency and is
       * definitely inefficient, yet it's correct.
       *
       * FINISHME(chadv): Import the nativeFenceFd into the VkSemaphore and
       * VkFence.
       */
      if (sync_wait(nativeFenceFd, /*timeout*/ -1) < 0) {
         result = vk_errorf(device->instance, device, VK_ERROR_DEVICE_LOST,
                            "%s: failed to wait on nativeFenceFd=%d",
                            __func__, nativeFenceFd);
      }

      /* From VK_ANDROID_native_buffer's pseudo spec
       * (https://source.android.com/devices/graphics/implement-vulkan):
       *
       *    The driver takes ownership of the fence fd and is responsible for
       *    closing it [...] even if vkAcquireImageANDROID fails and returns
       *    an error.
       */
      close(nativeFenceFd);

      if (result != VK_SUCCESS)
         return result;
   }

   if (semaphore_h || fence_h) {
      /* Thanks to implicit sync, the image is ready for GPU access. But we
       * must still put the semaphore into the "submit" state; otherwise the
       * client may get unexpected behavior if the client later uses it as
       * a wait semaphore.
       *
       * Because we blocked above on the nativeFenceFd, the image is also
       * ready for foreign-device access (including CPU access). But we must
       * still signal the fence; otherwise the client may get unexpected
       * behavior if the client later waits on it.
       *
       * For some values of anv_semaphore_type, we must submit the semaphore
       * to execbuf in order to signal it. Likewise for anv_fence_type.
       * Instead of open-coding here the signal operation for each
       * anv_semaphore_type and anv_fence_type, we piggy-back on
       * vkQueueSubmit.
       */
      const VkSubmitInfo submit = {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = 0,
         .commandBufferCount = 0,
         .signalSemaphoreCount = (semaphore_h ? 1 : 0),
         .pSignalSemaphores = &semaphore_h,
      };

      result = anv_QueueSubmit(anv_queue_to_handle(&device->queue), 1,
                               &submit, fence_h);
      if (result != VK_SUCCESS) {
         return vk_errorf(device->instance, device, result,
                          "anv_QueueSubmit failed inside %s", __func__);
      }
   }

   return VK_SUCCESS;
}

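/*
 * VK_ANDROID_native_buffer: flush the given wait semaphores before an image
 * is returned to the Android window system. Implicit sync covers the actual
 * rendering, so no explicit native fence fd is handed back.
 */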
VkResult
anv_QueueSignalReleaseImageANDROID(
    VkQueue queue,
    uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores,
    VkImage image,
    int* pNativeFenceFd)
{
   VkResult result;

   if (waitSemaphoreCount == 0)
      goto done;

   result = anv_QueueSubmit(queue, 1,
      &(VkSubmitInfo) {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = waitSemaphoreCount,
         .pWaitSemaphores = pWaitSemaphores,
      },
      (VkFence) VK_NULL_HANDLE);
   if (result != VK_SUCCESS)
      return result;

 done:
   if (pNativeFenceFd) {
      /* We can rely on implicit sync because we submitted all the wait
       * semaphores to the queue above.
       */
      *pNativeFenceFd = -1;
   }

   return VK_SUCCESS;
}