2 * Copyright © 2015 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
#include <assert.h>
#include <errno.h>
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#include <xcb/xcb.h>
#include <xcb/dri3.h>
#include <xcb/present.h>

#include "util/hash_table.h"
32 struct wsi_x11_connection
{
38 struct anv_wsi_interface base
;
40 pthread_mutex_t mutex
;
41 /* Hash table of xcb_connection -> wsi_x11_connection mappings */
42 struct hash_table
*connections
;
45 static struct wsi_x11_connection
*
46 wsi_x11_connection_create(struct anv_instance
*instance
, xcb_connection_t
*conn
)
48 xcb_query_extension_cookie_t dri3_cookie
, pres_cookie
;
49 xcb_query_extension_reply_t
*dri3_reply
, *pres_reply
;
51 struct wsi_x11_connection
*wsi_conn
=
52 anv_alloc(&instance
->alloc
, sizeof(*wsi_conn
), 8,
53 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
);
57 dri3_cookie
= xcb_query_extension(conn
, 4, "DRI3");
58 pres_cookie
= xcb_query_extension(conn
, 7, "PRESENT");
60 dri3_reply
= xcb_query_extension_reply(conn
, dri3_cookie
, NULL
);
61 pres_reply
= xcb_query_extension_reply(conn
, pres_cookie
, NULL
);
62 if (dri3_reply
== NULL
|| pres_reply
== NULL
) {
65 anv_free(&instance
->alloc
, wsi_conn
);
69 wsi_conn
->has_dri3
= dri3_reply
->present
!= 0;
70 wsi_conn
->has_present
= pres_reply
->present
!= 0;
79 wsi_x11_connection_destroy(struct anv_instance
*instance
,
80 struct wsi_x11_connection
*conn
)
82 anv_free(&instance
->alloc
, conn
);
85 static struct wsi_x11_connection
*
86 wsi_x11_get_connection(struct anv_instance
*instance
, xcb_connection_t
*conn
)
89 (struct wsi_x11
*)instance
->wsi
[VK_ICD_WSI_PLATFORM_XCB
];
91 pthread_mutex_lock(&wsi
->mutex
);
93 struct hash_entry
*entry
= _mesa_hash_table_search(wsi
->connections
, conn
);
95 /* We're about to make a bunch of blocking calls. Let's drop the
96 * mutex for now so we don't block up too badly.
98 pthread_mutex_unlock(&wsi
->mutex
);
100 struct wsi_x11_connection
*wsi_conn
=
101 wsi_x11_connection_create(instance
, conn
);
103 pthread_mutex_lock(&wsi
->mutex
);
105 entry
= _mesa_hash_table_search(wsi
->connections
, conn
);
107 /* Oops, someone raced us to it */
108 wsi_x11_connection_destroy(instance
, wsi_conn
);
110 entry
= _mesa_hash_table_insert(wsi
->connections
, conn
, wsi_conn
);
114 pthread_mutex_unlock(&wsi
->mutex
);
119 static const VkSurfaceFormatKHR formats
[] = {
120 { .format
= VK_FORMAT_B8G8R8A8_SRGB
, },
123 static const VkPresentModeKHR present_modes
[] = {
124 VK_PRESENT_MODE_MAILBOX_KHR
,
127 static xcb_screen_t
*
128 get_screen_for_root(xcb_connection_t
*conn
, xcb_window_t root
)
130 xcb_screen_iterator_t screen_iter
=
131 xcb_setup_roots_iterator(xcb_get_setup(conn
));
133 for (; screen_iter
.rem
; xcb_screen_next (&screen_iter
)) {
134 if (screen_iter
.data
->root
== root
)
135 return screen_iter
.data
;
141 static xcb_visualtype_t
*
142 screen_get_visualtype(xcb_screen_t
*screen
, xcb_visualid_t visual_id
,
145 xcb_depth_iterator_t depth_iter
=
146 xcb_screen_allowed_depths_iterator(screen
);
148 for (; depth_iter
.rem
; xcb_depth_next (&depth_iter
)) {
149 xcb_visualtype_iterator_t visual_iter
=
150 xcb_depth_visuals_iterator (depth_iter
.data
);
152 for (; visual_iter
.rem
; xcb_visualtype_next (&visual_iter
)) {
153 if (visual_iter
.data
->visual_id
== visual_id
) {
155 *depth
= depth_iter
.data
->depth
;
156 return visual_iter
.data
;
164 static xcb_visualtype_t
*
165 connection_get_visualtype(xcb_connection_t
*conn
, xcb_visualid_t visual_id
,
168 xcb_screen_iterator_t screen_iter
=
169 xcb_setup_roots_iterator(xcb_get_setup(conn
));
171 /* For this we have to iterate over all of the screens which is rather
172 * annoying. Fortunately, there is probably only 1.
174 for (; screen_iter
.rem
; xcb_screen_next (&screen_iter
)) {
175 xcb_visualtype_t
*visual
= screen_get_visualtype(screen_iter
.data
,
184 static xcb_visualtype_t
*
185 get_visualtype_for_window(xcb_connection_t
*conn
, xcb_window_t window
,
188 xcb_query_tree_cookie_t tree_cookie
;
189 xcb_get_window_attributes_cookie_t attrib_cookie
;
190 xcb_query_tree_reply_t
*tree
;
191 xcb_get_window_attributes_reply_t
*attrib
;
193 tree_cookie
= xcb_query_tree(conn
, window
);
194 attrib_cookie
= xcb_get_window_attributes(conn
, window
);
196 tree
= xcb_query_tree_reply(conn
, tree_cookie
, NULL
);
197 attrib
= xcb_get_window_attributes_reply(conn
, attrib_cookie
, NULL
);
198 if (attrib
== NULL
|| tree
== NULL
) {
204 xcb_window_t root
= tree
->root
;
205 xcb_visualid_t visual_id
= attrib
->visual
;
209 xcb_screen_t
*screen
= get_screen_for_root(conn
, root
);
213 return screen_get_visualtype(screen
, visual_id
, depth
);
217 visual_has_alpha(xcb_visualtype_t
*visual
, unsigned depth
)
219 uint32_t rgb_mask
= visual
->red_mask
|
223 uint32_t all_mask
= 0xffffffff >> (32 - depth
);
225 /* Do we have bits left over after RGB? */
226 return (all_mask
& ~rgb_mask
) != 0;
229 VkBool32
anv_GetPhysicalDeviceXcbPresentationSupportKHR(
230 VkPhysicalDevice physicalDevice
,
231 uint32_t queueFamilyIndex
,
232 xcb_connection_t
* connection
,
233 xcb_visualid_t visual_id
)
235 ANV_FROM_HANDLE(anv_physical_device
, device
, physicalDevice
);
237 struct wsi_x11_connection
*wsi_conn
=
238 wsi_x11_get_connection(device
->instance
, connection
);
240 if (!wsi_conn
->has_dri3
) {
241 fprintf(stderr
, "vulkan: No DRI3 support\n");
245 unsigned visual_depth
;
246 if (!connection_get_visualtype(connection
, visual_id
, &visual_depth
))
249 if (visual_depth
!= 24 && visual_depth
!= 32)
256 x11_surface_get_support(VkIcdSurfaceBase
*icd_surface
,
257 struct anv_physical_device
*device
,
258 uint32_t queueFamilyIndex
,
259 VkBool32
* pSupported
)
261 VkIcdSurfaceXcb
*surface
= (VkIcdSurfaceXcb
*)icd_surface
;
263 struct wsi_x11_connection
*wsi_conn
=
264 wsi_x11_get_connection(device
->instance
, surface
->connection
);
266 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
268 if (!wsi_conn
->has_dri3
) {
269 fprintf(stderr
, "vulkan: No DRI3 support\n");
274 unsigned visual_depth
;
275 if (!get_visualtype_for_window(surface
->connection
, surface
->window
,
281 if (visual_depth
!= 24 && visual_depth
!= 32) {
291 x11_surface_get_capabilities(VkIcdSurfaceBase
*icd_surface
,
292 struct anv_physical_device
*device
,
293 VkSurfaceCapabilitiesKHR
*caps
)
295 VkIcdSurfaceXcb
*surface
= (VkIcdSurfaceXcb
*)icd_surface
;
296 xcb_get_geometry_cookie_t geom_cookie
;
297 xcb_generic_error_t
*err
;
298 xcb_get_geometry_reply_t
*geom
;
299 unsigned visual_depth
;
301 geom_cookie
= xcb_get_geometry(surface
->connection
, surface
->window
);
303 /* This does a round-trip. This is why we do get_geometry first and
304 * wait to read the reply until after we have a visual.
306 xcb_visualtype_t
*visual
=
307 get_visualtype_for_window(surface
->connection
, surface
->window
,
310 geom
= xcb_get_geometry_reply(surface
->connection
, geom_cookie
, &err
);
312 VkExtent2D extent
= { geom
->width
, geom
->height
};
313 caps
->currentExtent
= extent
;
314 caps
->minImageExtent
= extent
;
315 caps
->maxImageExtent
= extent
;
317 /* This can happen if the client didn't wait for the configure event
318 * to come back from the compositor. In that case, we don't know the
319 * size of the window so we just return valid "I don't know" stuff.
321 caps
->currentExtent
= (VkExtent2D
) { -1, -1 };
322 caps
->minImageExtent
= (VkExtent2D
) { 1, 1 };
323 caps
->maxImageExtent
= (VkExtent2D
) { INT16_MAX
, INT16_MAX
};
328 if (visual_has_alpha(visual
, visual_depth
)) {
329 caps
->supportedCompositeAlpha
= VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
|
330 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR
;
332 caps
->supportedCompositeAlpha
= VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
|
333 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR
;
336 caps
->minImageCount
= 2;
337 caps
->maxImageCount
= 4;
338 caps
->supportedTransforms
= VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR
;
339 caps
->currentTransform
= VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR
;
340 caps
->maxImageArrayLayers
= 1;
341 caps
->supportedUsageFlags
=
342 VK_IMAGE_USAGE_TRANSFER_SRC_BIT
|
343 VK_IMAGE_USAGE_SAMPLED_BIT
|
344 VK_IMAGE_USAGE_TRANSFER_DST_BIT
|
345 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
;
351 x11_surface_get_formats(VkIcdSurfaceBase
*surface
,
352 struct anv_physical_device
*device
,
353 uint32_t *pSurfaceFormatCount
,
354 VkSurfaceFormatKHR
*pSurfaceFormats
)
356 if (pSurfaceFormats
== NULL
) {
357 *pSurfaceFormatCount
= ARRAY_SIZE(formats
);
361 assert(*pSurfaceFormatCount
>= ARRAY_SIZE(formats
));
362 typed_memcpy(pSurfaceFormats
, formats
, *pSurfaceFormatCount
);
363 *pSurfaceFormatCount
= ARRAY_SIZE(formats
);
369 x11_surface_get_present_modes(VkIcdSurfaceBase
*surface
,
370 struct anv_physical_device
*device
,
371 uint32_t *pPresentModeCount
,
372 VkPresentModeKHR
*pPresentModes
)
374 if (pPresentModes
== NULL
) {
375 *pPresentModeCount
= ARRAY_SIZE(present_modes
);
379 assert(*pPresentModeCount
>= ARRAY_SIZE(present_modes
));
380 typed_memcpy(pPresentModes
, present_modes
, *pPresentModeCount
);
381 *pPresentModeCount
= ARRAY_SIZE(present_modes
);
387 x11_surface_create_swapchain(VkIcdSurfaceBase
*surface
,
388 struct anv_device
*device
,
389 const VkSwapchainCreateInfoKHR
* pCreateInfo
,
390 const VkAllocationCallbacks
* pAllocator
,
391 struct anv_swapchain
**swapchain
);
393 VkResult
anv_CreateXcbSurfaceKHR(
394 VkInstance _instance
,
395 const VkXcbSurfaceCreateInfoKHR
* pCreateInfo
,
396 const VkAllocationCallbacks
* pAllocator
,
397 VkSurfaceKHR
* pSurface
)
399 ANV_FROM_HANDLE(anv_instance
, instance
, _instance
);
401 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR
);
403 VkIcdSurfaceXcb
*surface
;
405 surface
= anv_alloc2(&instance
->alloc
, pAllocator
, sizeof *surface
, 8,
406 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
408 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
410 surface
->base
.platform
= VK_ICD_WSI_PLATFORM_XCB
;
411 surface
->connection
= pCreateInfo
->connection
;
412 surface
->window
= pCreateInfo
->window
;
414 *pSurface
= _VkIcdSurfaceBase_to_handle(&surface
->base
);
420 struct anv_image
* image
;
421 struct anv_device_memory
* memory
;
423 xcb_get_geometry_cookie_t geom_cookie
;
427 struct x11_swapchain
{
428 struct anv_swapchain base
;
430 xcb_connection_t
* conn
;
434 uint32_t image_count
;
436 struct x11_image images
[0];
440 x11_get_images(struct anv_swapchain
*anv_chain
,
441 uint32_t* pCount
, VkImage
*pSwapchainImages
)
443 struct x11_swapchain
*chain
= (struct x11_swapchain
*)anv_chain
;
445 if (pSwapchainImages
== NULL
) {
446 *pCount
= chain
->image_count
;
450 assert(chain
->image_count
<= *pCount
);
451 for (uint32_t i
= 0; i
< chain
->image_count
; i
++)
452 pSwapchainImages
[i
] = anv_image_to_handle(chain
->images
[i
].image
);
454 *pCount
= chain
->image_count
;
460 x11_acquire_next_image(struct anv_swapchain
*anv_chain
,
462 VkSemaphore semaphore
,
463 uint32_t *image_index
)
465 struct x11_swapchain
*chain
= (struct x11_swapchain
*)anv_chain
;
466 struct x11_image
*image
= &chain
->images
[chain
->next_image
];
469 xcb_generic_error_t
*err
;
470 xcb_get_geometry_reply_t
*geom
=
471 xcb_get_geometry_reply(chain
->conn
, image
->geom_cookie
, &err
);
474 return vk_error(VK_ERROR_OUT_OF_DATE_KHR
);
477 if (geom
->width
!= chain
->extent
.width
||
478 geom
->height
!= chain
->extent
.height
) {
480 return vk_error(VK_ERROR_OUT_OF_DATE_KHR
);
487 *image_index
= chain
->next_image
;
488 chain
->next_image
= (chain
->next_image
+ 1) % chain
->image_count
;
493 x11_queue_present(struct anv_swapchain
*anv_chain
,
494 struct anv_queue
*queue
,
495 uint32_t image_index
)
497 struct x11_swapchain
*chain
= (struct x11_swapchain
*)anv_chain
;
498 struct x11_image
*image
= &chain
->images
[image_index
];
500 assert(image_index
< chain
->image_count
);
502 xcb_void_cookie_t cookie
;
504 cookie
= xcb_copy_area(chain
->conn
,
511 chain
->extent
.height
);
512 xcb_discard_reply(chain
->conn
, cookie
.sequence
);
514 image
->geom_cookie
= xcb_get_geometry(chain
->conn
, chain
->window
);
517 xcb_flush(chain
->conn
);
523 x11_swapchain_destroy(struct anv_swapchain
*anv_chain
,
524 const VkAllocationCallbacks
*pAllocator
)
526 struct x11_swapchain
*chain
= (struct x11_swapchain
*)anv_chain
;
527 xcb_void_cookie_t cookie
;
529 for (uint32_t i
= 0; i
< chain
->image_count
; i
++) {
530 struct x11_image
*image
= &chain
->images
[i
];
533 xcb_discard_reply(chain
->conn
, image
->geom_cookie
.sequence
);
535 cookie
= xcb_free_pixmap(chain
->conn
, image
->pixmap
);
536 xcb_discard_reply(chain
->conn
, cookie
.sequence
);
538 anv_DestroyImage(anv_device_to_handle(chain
->base
.device
),
539 anv_image_to_handle(image
->image
), pAllocator
);
541 anv_FreeMemory(anv_device_to_handle(chain
->base
.device
),
542 anv_device_memory_to_handle(image
->memory
), pAllocator
);
545 anv_free2(&chain
->base
.device
->alloc
, pAllocator
, chain
);
551 x11_surface_create_swapchain(VkIcdSurfaceBase
*icd_surface
,
552 struct anv_device
*device
,
553 const VkSwapchainCreateInfoKHR
*pCreateInfo
,
554 const VkAllocationCallbacks
* pAllocator
,
555 struct anv_swapchain
**swapchain_out
)
557 VkIcdSurfaceXcb
*surface
= (VkIcdSurfaceXcb
*)icd_surface
;
558 struct x11_swapchain
*chain
;
559 xcb_void_cookie_t cookie
;
562 int num_images
= pCreateInfo
->minImageCount
;
564 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR
);
566 size_t size
= sizeof(*chain
) + num_images
* sizeof(chain
->images
[0]);
567 chain
= anv_alloc2(&device
->alloc
, pAllocator
, size
, 8,
568 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
570 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
572 chain
->base
.device
= device
;
573 chain
->base
.destroy
= x11_swapchain_destroy
;
574 chain
->base
.get_images
= x11_get_images
;
575 chain
->base
.acquire_next_image
= x11_acquire_next_image
;
576 chain
->base
.queue_present
= x11_queue_present
;
578 chain
->conn
= surface
->connection
;
579 chain
->window
= surface
->window
;
580 chain
->extent
= pCreateInfo
->imageExtent
;
581 chain
->image_count
= num_images
;
582 chain
->next_image
= 0;
584 for (uint32_t i
= 0; i
< chain
->image_count
; i
++) {
585 VkDeviceMemory memory_h
;
587 struct anv_image
*image
;
588 struct anv_surface
*surface
;
589 struct anv_device_memory
*memory
;
591 anv_image_create(anv_device_to_handle(device
),
592 &(struct anv_image_create_info
) {
593 .isl_tiling_flags
= ISL_TILING_X_BIT
,
596 &(VkImageCreateInfo
) {
597 .sType
= VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO
,
598 .imageType
= VK_IMAGE_TYPE_2D
,
599 .format
= pCreateInfo
->imageFormat
,
601 .width
= pCreateInfo
->imageExtent
.width
,
602 .height
= pCreateInfo
->imageExtent
.height
,
608 /* FIXME: Need a way to use X tiling to allow scanout */
609 .tiling
= VK_IMAGE_TILING_OPTIMAL
,
610 .usage
= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT
,
616 image
= anv_image_from_handle(image_h
);
617 assert(anv_format_is_color(image
->format
));
619 surface
= &image
->color_surface
;
621 anv_AllocateMemory(anv_device_to_handle(device
),
622 &(VkMemoryAllocateInfo
) {
623 .sType
= VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO
,
624 .allocationSize
= image
->size
,
625 .memoryTypeIndex
= 0,
627 NULL
/* XXX: pAllocator */,
630 memory
= anv_device_memory_from_handle(memory_h
);
631 memory
->bo
.is_winsys_bo
= true;
633 anv_BindImageMemory(VK_NULL_HANDLE
, anv_image_to_handle(image
),
636 int ret
= anv_gem_set_tiling(device
, memory
->bo
.gem_handle
,
637 surface
->isl
.row_pitch
, I915_TILING_X
);
639 /* FINISHME: Choose a better error. */
640 result
= vk_errorf(VK_ERROR_OUT_OF_DEVICE_MEMORY
,
641 "set_tiling failed: %m");
645 int fd
= anv_gem_handle_to_fd(device
, memory
->bo
.gem_handle
);
647 /* FINISHME: Choose a better error. */
648 result
= vk_errorf(VK_ERROR_OUT_OF_DEVICE_MEMORY
,
649 "handle_to_fd failed: %m");
655 xcb_pixmap_t pixmap
= xcb_generate_id(chain
->conn
);
658 xcb_dri3_pixmap_from_buffer_checked(chain
->conn
,
662 pCreateInfo
->imageExtent
.width
,
663 pCreateInfo
->imageExtent
.height
,
664 surface
->isl
.row_pitch
,
667 chain
->images
[i
].image
= image
;
668 chain
->images
[i
].memory
= memory
;
669 chain
->images
[i
].pixmap
= pixmap
;
670 chain
->images
[i
].busy
= false;
672 xcb_discard_reply(chain
->conn
, cookie
.sequence
);
675 chain
->gc
= xcb_generate_id(chain
->conn
);
677 /* FINISHME: Choose a better error. */
678 result
= vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
682 cookie
= xcb_create_gc(chain
->conn
,
685 XCB_GC_GRAPHICS_EXPOSURES
,
686 (uint32_t []) { 0 });
687 xcb_discard_reply(chain
->conn
, cookie
.sequence
);
689 *swapchain_out
= &chain
->base
;
698 anv_x11_init_wsi(struct anv_instance
*instance
)
703 wsi
= anv_alloc(&instance
->alloc
, sizeof(*wsi
), 8,
704 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
);
706 result
= vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
710 int ret
= pthread_mutex_init(&wsi
->mutex
, NULL
);
713 result
= vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
715 /* FINISHME: Choose a better error. */
716 result
= vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
722 wsi
->connections
= _mesa_hash_table_create(NULL
, _mesa_hash_pointer
,
723 _mesa_key_pointer_equal
);
724 if (!wsi
->connections
) {
725 result
= vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
729 wsi
->base
.get_support
= x11_surface_get_support
;
730 wsi
->base
.get_capabilities
= x11_surface_get_capabilities
;
731 wsi
->base
.get_formats
= x11_surface_get_formats
;
732 wsi
->base
.get_present_modes
= x11_surface_get_present_modes
;
733 wsi
->base
.create_swapchain
= x11_surface_create_swapchain
;
735 instance
->wsi
[VK_ICD_WSI_PLATFORM_XCB
] = &wsi
->base
;
740 pthread_mutex_destroy(&wsi
->mutex
);
742 anv_free(&instance
->alloc
, wsi
);
744 instance
->wsi
[VK_ICD_WSI_PLATFORM_XCB
] = NULL
;
750 anv_x11_finish_wsi(struct anv_instance
*instance
)
752 struct wsi_x11
*wsi
=
753 (struct wsi_x11
*)instance
->wsi
[VK_ICD_WSI_PLATFORM_XCB
];
756 _mesa_hash_table_destroy(wsi
->connections
, NULL
);
758 pthread_mutex_destroy(&wsi
->mutex
);
760 anv_free(&instance
->alloc
, wsi
);