vulkan/wsi/wayland: Stop caching Wayland displays
mesa.git: src/vulkan/wsi/wsi_common_wayland.c
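As the subject line says, Wayland displays are no longer cached: every surface query now initializes a short-lived wsi_wl_display on the stack and finishes it when the query is done, and each swapchain creates its own heap-allocated display for its lifetime. A minimal sketch of the resulting pattern, mirroring wsi_wl_get_presentation_support() and wsi_wl_surface_get_formats() below:

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, wl_display, true /* get_format_list */))
      return VK_ERROR_SURFACE_LOST_KHR; /* init cleans up after itself on failure */

   /* ... read display.formats / display.capabilities ... */

   wsi_wl_display_finish(&display);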
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>

#include <assert.h>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <pthread.h>

#include "vk_util.h"
#include "wsi_common_wayland.h"
#include "wayland-drm-client-protocol.h"

#include <util/hash_table.h>
#include <util/u_vector.h>

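/* memcpy() with a static check that source and destination element types
 * have the same size; used below to copy present_modes into the caller's
 * VkPresentModeKHR array.
 */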
#define typed_memcpy(dest, src, count) ({ \
   STATIC_ASSERT(sizeof(*src) == sizeof(*dest)); \
   memcpy((dest), (src), (count) * sizeof(*(src))); \
})

struct wsi_wayland;

struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *                          wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *                          wl_display_wrapper;
   struct wl_event_queue *                      queue;
   struct wl_drm *                              drm;

   struct wsi_wayland *wsi_wl;
   /* Vector of VkFormats supported */
   struct u_vector                              formats;

   uint32_t                                     capabilities;
};

struct wsi_wayland {
   struct wsi_interface                         base;

   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;

   const struct wsi_callbacks *cbs;
};

static void
wsi_wl_display_add_vk_format(struct wsi_wl_display *display, VkFormat format)
{
   /* Don't add a format that's already in the list */
   VkFormat *f;
   u_vector_foreach(f, &display->formats)
      if (*f == format)
         return;

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;

   display->wsi_wl->cbs->get_phys_device_format_properties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return;

   f = u_vector_add(&display->formats);
   if (f)
      *f = format;
}

static void
drm_handle_device(void *data, struct wl_drm *drm, const char *name)
{
}

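/* Maps a Vulkan format to the wl_drm (DRM fourcc) format passed to the
 * compositor.  Note the component-order flip: DRM format names describe the
 * packed 32-bit word on a little-endian machine, so WL_DRM_FORMAT_ARGB8888
 * has the same byte layout as VK_FORMAT_B8G8R8A8_UNORM.  When the surface is
 * opaque (alpha unused), the X variant is returned, which tells the
 * compositor to ignore the alpha channel.
 */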
static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   /* TODO: Figure out what all the formats mean and make this table
    * correct.
    */
#if 0
   case VK_FORMAT_R4G4B4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR4444 : WL_DRM_FORMAT_XBGR4444;
   case VK_FORMAT_R5G6B5_UNORM:
      return WL_DRM_FORMAT_BGR565;
   case VK_FORMAT_R5G5B5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR1555 : WL_DRM_FORMAT_XBGR1555;
   case VK_FORMAT_R8G8B8_UNORM:
      return WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR8888 : WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R10G10B10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR2101010 : WL_DRM_FORMAT_XBGR2101010;
   case VK_FORMAT_B4G4R4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB4444 : WL_DRM_FORMAT_XRGB4444;
   case VK_FORMAT_B5G6R5_UNORM:
      return WL_DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G5R5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB1555 : WL_DRM_FORMAT_XRGB1555;
#endif
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return WL_DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ARGB8888 : WL_DRM_FORMAT_XRGB8888;
#if 0
   case VK_FORMAT_B10G10R10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB2101010 : WL_DRM_FORMAT_XRGB2101010;
#endif

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

static void
drm_handle_format(void *data, struct wl_drm *drm, uint32_t wl_format)
{
   struct wsi_wl_display *display = data;
   if (display->formats.element_size == 0)
      return;

   switch (wl_format) {
#if 0
   case WL_DRM_FORMAT_ABGR4444:
   case WL_DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R4G4B4A4_UNORM);
      break;
   case WL_DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G6B5_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR1555:
   case WL_DRM_FORMAT_XBGR1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G5B5A1_UNORM);
      break;
   case WL_DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR2101010:
   case WL_DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R10G10B10A2_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB4444:
   case WL_DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B4G4R4A4_UNORM);
      break;
   case WL_DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G6R5_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB1555:
   case WL_DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G5R5A1_UNORM);
      break;
#endif
   case WL_DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_SRGB);
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_SRGB);
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_UNORM);
      break;
#if 0
   case WL_DRM_FORMAT_ARGB2101010:
   case WL_DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B10G10R10A2_UNORM);
      break;
#endif
   }
}

static void
drm_handle_authenticated(void *data, struct wl_drm *drm)
{
}

static void
drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t capabilities)
{
   struct wsi_wl_display *display = data;

   display->capabilities = capabilities;
}

static const struct wl_drm_listener drm_listener = {
   drm_handle_device,
   drm_handle_format,
   drm_handle_authenticated,
   drm_handle_capabilities,
};

static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (strcmp(interface, "wl_drm") == 0) {
      assert(display->drm == NULL);

      assert(version >= 2);
      display->drm = wl_registry_bind(registry, name, &wl_drm_interface, 2);

      if (display->drm)
         wl_drm_add_listener(display->drm, &drm_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   u_vector_finish(&display->formats);
   if (display->drm)
      wl_drm_destroy(display->drm);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}

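/* Initializes a short-lived wsi_wl_display for the given wl_display.  All
 * Wayland objects created here are bound to a private event queue (via a
 * wl_proxy wrapper around the display) so that dispatching them never
 * interferes with the application's own event handling.  Two round-trips
 * are needed: one to receive the wl_drm global from the registry, and a
 * second to receive its format and capability events.
 */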
static int
wsi_wl_display_init(struct wsi_wayland *wsi_wl,
                    struct wsi_wl_display *display,
                    struct wl_display *wl_display,
                    bool get_format_list)
{
   memset(display, 0, sizeof(*display));

   display->wsi_wl = wsi_wl;
   display->wl_display = wl_display;

   if (get_format_list) {
      if (!u_vector_init(&display->formats, sizeof(VkFormat), 8))
         goto fail;
   }

   display->queue = wl_display_create_queue(wl_display);
   if (!display->queue)
      goto fail;

   display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
   if (!display->wl_display_wrapper)
      goto fail;

   wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
                      display->queue);

   struct wl_registry *registry =
      wl_display_get_registry(display->wl_display_wrapper);
   if (!registry)
      goto fail;

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get the wl_drm global */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   if (!display->drm)
      goto fail_registry;

   /* Round-trip to get wl_drm formats and capabilities */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   /* We need prime support */
   if (!(display->capabilities & WL_DRM_CAPABILITY_PRIME))
      goto fail_registry;

   /* We don't need this anymore */
   wl_registry_destroy(registry);

   return 0;

fail_registry:
   if (registry)
      wl_registry_destroy(registry);

fail:
   wsi_wl_display_finish(display);
   return -1;
}

static struct wsi_wl_display *
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display)
{
   struct wsi_wl_display *display =
      vk_alloc(wsi->alloc, sizeof(*display), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return NULL;

   if (wsi_wl_display_init(wsi, display, wl_display, true)) {
      vk_free(wsi->alloc, display);
      return NULL;
   }

   return display;
}

static void
wsi_wl_display_destroy(struct wsi_wl_display *display)
{
   struct wsi_wayland *wsi = display->wsi_wl;
   wsi_wl_display_finish(display);
   vk_free(wsi->alloc, display);
}

VkBool32
wsi_wl_get_presentation_support(struct wsi_device *wsi_device,
                                struct wl_display *wl_display)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   int ret = wsi_wl_display_init(wsi, &display, wl_display, false);
   if (ret == 0)
      wsi_wl_display_finish(&display);

   return ret == 0;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct wsi_device *wsi_device,
                           const VkAllocationCallbacks *alloc,
                           uint32_t queueFamilyIndex,
                           int local_fd,
                           bool can_handle_different_gpu,
                           VkBool32* pSupported)
{
   *pSupported = true;

   return VK_SUCCESS;
}

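/* Only MAILBOX and FIFO are advertised below; presumably because Wayland
 * surfaces are always composited, there is no IMMEDIATE (tearing) path to
 * expose here.
 */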
static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};

static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                VkSurfaceCapabilitiesKHR* caps)
{
   /* For true mailbox mode, we need at least 4 images:
    * 1) One to scan out from
    * 2) One to have queued for scan-out
    * 3) One to be currently held by the Wayland compositor
    * 4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

   caps->currentExtent = (VkExtent2D) { -1, -1 };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   /* This is the maximum supported size on Intel */
   caps->maxImageExtent = (VkExtent2D) { 1 << 14, 1 << 14 };
   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   return wsi_wl_surface_get_capabilities(surface, &caps->surfaceCapabilities);
}

static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->format = *disp_fmt;
         out_fmt->colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t* pSurfaceFormatCount,
                            VkSurfaceFormat2KHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, &display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->surfaceFormat.format = *disp_fmt;
         out_fmt->surfaceFormat.colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);

   if (*pPresentModeCount < ARRAY_SIZE(present_modes))
      return VK_INCOMPLETE;
   else
      return VK_SUCCESS;
}

VkResult wsi_create_wl_surface(const VkAllocationCallbacks *pAllocator,
                               const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                               VkSurfaceKHR *pSurface)
{
   VkIcdSurfaceWayland *surface;

   surface = vk_alloc(pAllocator, sizeof *surface, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

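/* Per-swapchain-image state.  "busy" is set when the image is handed to the
 * application by acquire_next_image and again when it is committed to the
 * surface; it is cleared by the wl_buffer release event once the compositor
 * is done with the buffer.
 */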
struct wsi_wl_image {
   VkImage image;
   VkDeviceMemory memory;
   struct wl_buffer *                           buffer;
   bool                                         busy;
};

struct wsi_wl_swapchain {
   struct wsi_swapchain                         base;

   struct wsi_wl_display                        *display;

   struct wl_surface *                          surface;
   uint32_t                                     surface_version;
   struct wl_drm *                              drm_wrapper;
   struct wl_callback *                         frame;

   VkExtent2D                                   extent;
   VkFormat                                     vk_format;
   uint32_t                                     drm_format;

   VkPresentModeKHR                             present_mode;
   bool                                         fifo_ready;

   struct wsi_wl_image                          images[0];
};

static VkResult
wsi_wl_swapchain_get_images(struct wsi_swapchain *wsi_chain,
                            uint32_t *pCount, VkImage *pSwapchainImages)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   uint32_t ret_count;
   VkResult result;

   if (pSwapchainImages == NULL) {
      *pCount = chain->base.image_count;
      return VK_SUCCESS;
   }

   result = VK_SUCCESS;
   ret_count = chain->base.image_count;
   if (chain->base.image_count > *pCount) {
      ret_count = *pCount;
      result = VK_INCOMPLETE;
   }

   for (uint32_t i = 0; i < ret_count; i++)
      pSwapchainImages[i] = chain->images[i].image;

   return result;
}

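/* Note that the timeout and semaphore parameters are not used here: the loop
 * below returns the first image the compositor has released, doing a
 * blocking round-trip on the swapchain's queue whenever every image is still
 * busy.
 */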
static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
                                    uint64_t timeout,
                                    VkSemaphore semaphore,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
                                               chain->display->queue);
   /* XXX: I'm not sure if out-of-date is the right error here. If
    * wl_display_dispatch_queue_pending fails it most likely means we got
    * kicked by the server so this seems more-or-less correct.
    */
   if (ret < 0)
      return VK_ERROR_OUT_OF_DATE_KHR;

   while (1) {
      for (uint32_t i = 0; i < chain->base.image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            chain->images[i].busy = true;
            return VK_SUCCESS;
         }
      }

      /* This time we do a blocking round-trip because we can't go
       * anywhere until we get an event.
       */
      int ret = wl_display_roundtrip_queue(chain->display->wl_display,
                                           chain->display->queue);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;
   }
}

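/* Frame callback used to implement FIFO presentation: queue_present installs
 * a wl_surface frame callback and clears fifo_ready; the next present then
 * blocks dispatching events until the compositor fires this callback and
 * fifo_ready is set again.
 */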
static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->frame = NULL;
   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};

static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   if (chain->surface_version >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}

static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};

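/* Creates one swapchain image: the driver-side VkImage/VkDeviceMemory pair is
 * allocated through create_wsi_image(), which also exports a dma-buf (prime)
 * file descriptor; that fd is passed to the compositor via
 * wl_drm_create_prime_buffer() to create the wl_buffer, and then closed
 * locally (libwayland duplicates the fd when marshalling the request).
 */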
static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain,
                  struct wsi_wl_image *image,
                  const VkSwapchainCreateInfoKHR *pCreateInfo,
                  const VkAllocationCallbacks* pAllocator)
{
   VkDevice vk_device = chain->base.device;
   VkResult result;
   int fd;
   uint32_t size;
   uint32_t row_pitch;
   uint32_t offset;
   result = chain->base.image_fns->create_wsi_image(vk_device,
                                                    pCreateInfo,
                                                    pAllocator,
                                                    false,
                                                    false,
                                                    &image->image,
                                                    &image->memory,
                                                    &size,
                                                    &offset,
                                                    &row_pitch,
                                                    &fd);
   if (result != VK_SUCCESS)
      return result;

   image->buffer = wl_drm_create_prime_buffer(chain->drm_wrapper,
                                              fd, /* name */
                                              chain->extent.width,
                                              chain->extent.height,
                                              chain->drm_format,
                                              offset,
                                              row_pitch,
                                              0, 0, 0, 0 /* unused */);
   close(fd);

   if (!image->buffer) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail_image;
   }

   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_image:
   chain->base.image_fns->free_wsi_image(vk_device, pAllocator,
                                         image->image, image->memory);

   return result;
}

static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      if (chain->images[i].buffer) {
         wl_buffer_destroy(chain->images[i].buffer);
         chain->base.image_fns->free_wsi_image(chain->base.device, pAllocator,
                                               chain->images[i].image,
                                               chain->images[i].memory);
      }
   }

   if (chain->frame)
      wl_callback_destroy(chain->frame);
   if (chain->surface)
      wl_proxy_wrapper_destroy(chain->surface);
   if (chain->drm_wrapper)
      wl_proxy_wrapper_destroy(chain->drm_wrapper);

   if (chain->display)
      wsi_wl_display_destroy(chain->display);

   vk_free(pAllocator, chain);

   return VK_SUCCESS;
}

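/* Creates the swapchain.  The swapchain gets its own wsi_wl_display (and thus
 * its own event queue), and per-swapchain proxy wrappers for the wl_surface
 * and the wl_drm object are set to that queue, so frame callbacks and buffer
 * release events are only delivered when this swapchain dispatches them.
 */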
static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                VkDevice device,
                                struct wsi_device *wsi_device,
                                int local_fd,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                const struct wsi_image_fns *image_fns,
                                struct wsi_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = vk_alloc(pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   /* Mark a bunch of stuff as NULL. This way we can just call
    * destroy_swapchain for cleanup.
    */
   for (uint32_t i = 0; i < num_images; i++)
      chain->images[i].buffer = NULL;
   chain->surface = NULL;
   chain->drm_wrapper = NULL;
   chain->frame = NULL;

   bool alpha = pCreateInfo->compositeAlpha ==
                VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   chain->base.device = device;
   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_images = wsi_wl_swapchain_get_images;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;
   chain->base.image_fns = image_fns;
   chain->base.present_mode = pCreateInfo->presentMode;
   chain->base.image_count = num_images;
   chain->base.needs_linear_copy = false;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);

   chain->display = wsi_wl_display_create(wsi, surface->display);
   if (!chain->display) {
      result = VK_ERROR_INITIALIZATION_FAILED;
      goto fail;
   }

   chain->surface = wl_proxy_create_wrapper(surface->surface);
   if (!chain->surface) {
      result = VK_ERROR_INITIALIZATION_FAILED;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->surface,
                      chain->display->queue);
   chain->surface_version = wl_proxy_get_version((void *)surface->surface);

   chain->drm_wrapper = wl_proxy_create_wrapper(chain->display->drm);
   if (!chain->drm_wrapper) {
      result = VK_ERROR_INITIALIZATION_FAILED;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->drm_wrapper,
                      chain->display->queue);

   chain->fifo_ready = true;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i],
                                 pCreateInfo, pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

VkResult
wsi_wl_init_wsi(struct wsi_device *wsi_device,
                const VkAllocationCallbacks *alloc,
                VkPhysicalDevice physical_device,
                const struct wsi_callbacks *cbs)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = vk_alloc(alloc, sizeof(*wsi), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wsi->physical_device = physical_device;
   wsi->alloc = alloc;
   wsi->cbs = cbs;

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities = wsi_wl_surface_get_capabilities;
   wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail:
   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
wsi_wl_finish_wsi(struct wsi_device *wsi_device,
                  const VkAllocationCallbacks *alloc)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   if (!wsi)
      return;

   vk_free(alloc, wsi);
}