mesa.git: src/intel/vulkan/anv_wsi_wayland.c (14062638f92ff8518ab73bf18060232c98b9a6bc)
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>
#include <wayland-drm-client-protocol.h>

#include "anv_wsi.h"

#include "vk_format_info.h"
#include <util/hash_table.h>

#define MIN_NUM_IMAGES 2

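/* Per-wl_display WSI state: the wl_drm global we bound, the list of
 * VkFormats the compositor advertised for it, and its wl_drm capability
 * flags.
 */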
struct wsi_wl_display {
   struct anv_physical_device *physical_device;
   struct wl_display *display;
   struct wl_drm *drm;

   /* Vector of VkFormats supported */
   struct anv_vector formats;

   uint32_t capabilities;
};

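/* Per-physical-device Wayland WSI state.  The hash table maps a wl_display
 * pointer to its wsi_wl_display and is guarded by the mutex, since several
 * threads may query surface support concurrently.
 */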
struct wsi_wayland {
   struct anv_wsi_interface base;

   struct anv_physical_device *physical_device;

   pthread_mutex_t mutex;
   /* Hash table of wl_display -> wsi_wl_display mappings */
   struct hash_table *displays;
};

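/* Record a format advertised by the compositor, skipping duplicates and
 * formats we cannot use as a color attachment.
 */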
static void
wsi_wl_display_add_vk_format(struct wsi_wl_display *display, VkFormat format)
{
   /* Don't add a format that's already in the list */
   VkFormat *f;
   anv_vector_foreach(f, &display->formats)
      if (*f == format)
         return;

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;
   anv_GetPhysicalDeviceFormatProperties(
      anv_physical_device_to_handle(display->physical_device), format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return;

   f = anv_vector_add(&display->formats);
   if (f)
      *f = format;
}

static void
drm_handle_device(void *data, struct wl_drm *drm, const char *name)
{
   fprintf(stderr, "wl_drm.device(%s)\n", name);
}

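/* Map a VkFormat to the wl_drm format code we hand to the compositor.
 * wl_drm format names describe a packed little-endian 32-bit word, so
 * Vulkan's byte-ordered B8G8R8A8 corresponds to wl_drm ARGB8888; when the
 * surface is opaque we use the X (ignore-alpha) variant instead.
 */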
static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   /* TODO: Figure out what all the formats mean and make this table
    * correct.
    */
#if 0
   case VK_FORMAT_R4G4B4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR4444 : WL_DRM_FORMAT_XBGR4444;
   case VK_FORMAT_R5G6B5_UNORM:
      return WL_DRM_FORMAT_BGR565;
   case VK_FORMAT_R5G5B5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR1555 : WL_DRM_FORMAT_XBGR1555;
   case VK_FORMAT_R8G8B8_UNORM:
      return WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR8888 : WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R10G10B10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR2101010 : WL_DRM_FORMAT_XBGR2101010;
   case VK_FORMAT_B4G4R4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB4444 : WL_DRM_FORMAT_XRGB4444;
   case VK_FORMAT_B5G6R5_UNORM:
      return WL_DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G5R5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB1555 : WL_DRM_FORMAT_XRGB1555;
#endif
   case VK_FORMAT_B8G8R8_SRGB:
      return WL_DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ARGB8888 : WL_DRM_FORMAT_XRGB8888;
#if 0
   case VK_FORMAT_B10G10R10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB2101010 : WL_DRM_FORMAT_XRGB2101010;
#endif

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

static void
drm_handle_format(void *data, struct wl_drm *drm, uint32_t wl_format)
{
   struct wsi_wl_display *display = data;

   switch (wl_format) {
#if 0
   case WL_DRM_FORMAT_ABGR4444:
   case WL_DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R4G4B4A4_UNORM);
      break;
   case WL_DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G6B5_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR1555:
   case WL_DRM_FORMAT_XBGR1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G5B5A1_UNORM);
      break;
   case WL_DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR2101010:
   case WL_DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R10G10B10A2_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB4444:
   case WL_DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B4G4R4A4_UNORM);
      break;
   case WL_DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G6R5_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB1555:
   case WL_DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G5R5A1_UNORM);
      break;
#endif
   case WL_DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_SRGB);
      /* fallthrough */
   case WL_DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_SRGB);
      break;
#if 0
   case WL_DRM_FORMAT_ARGB2101010:
   case WL_DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B10G10R10A2_UNORM);
      break;
#endif
   }
}

static void
drm_handle_authenticated(void *data, struct wl_drm *drm)
{
}

static void
drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t capabilities)
{
   struct wsi_wl_display *display = data;

   display->capabilities = capabilities;
}

static const struct wl_drm_listener drm_listener = {
   drm_handle_device,
   drm_handle_format,
   drm_handle_authenticated,
   drm_handle_capabilities,
};

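/* Registry listener: the only global we care about is wl_drm; everything
 * else in the registry is ignored.
 */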
static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (strcmp(interface, "wl_drm") == 0) {
      assert(display->drm == NULL);

      assert(version >= 2);
      display->drm = wl_registry_bind(registry, name, &wl_drm_interface, 2);

      if (display->drm)
         wl_drm_add_listener(display->drm, &drm_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

static void
wsi_wl_display_destroy(struct wsi_wayland *wsi, struct wsi_wl_display *display)
{
   anv_vector_finish(&display->formats);
   if (display->drm)
      wl_drm_destroy(display->drm);
   anv_free(&wsi->physical_device->instance->alloc, display);
}

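/* Create the per-display state: bind wl_drm from the registry, collect the
 * formats it advertises, and require PRIME (dma-buf fd passing) support,
 * since swapchain images are shared with the compositor as prime fds.
 */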
static struct wsi_wl_display *
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display)
{
   struct wsi_wl_display *display =
      anv_alloc(&wsi->physical_device->instance->alloc, sizeof(*display), 8,
                VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return NULL;

   memset(display, 0, sizeof(*display));

   display->display = wl_display;
   display->physical_device = wsi->physical_device;

   struct wl_registry *registry = NULL;

   if (!anv_vector_init(&display->formats, sizeof(VkFormat), 8))
      goto fail;

   registry = wl_display_get_registry(wl_display);
   if (!registry)
      goto fail;

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get the wl_drm global */
   wl_display_roundtrip(wl_display);

   if (!display->drm)
      goto fail;

   /* Round-trip to get wl_drm formats and capabilities */
   wl_display_roundtrip(wl_display);

   /* We need prime support */
   if (!(display->capabilities & WL_DRM_CAPABILITY_PRIME))
      goto fail;

   /* We don't need this anymore */
   wl_registry_destroy(registry);

   return display;

fail:
   if (registry)
      wl_registry_destroy(registry);

   wsi_wl_display_destroy(wsi, display);
   return NULL;
}

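/* Look up (or lazily create) the wsi_wl_display for a wl_display.  Creation
 * happens with the mutex dropped, so another thread may beat us to it; in
 * that case we keep the cached entry and throw our copy away.
 */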
static struct wsi_wl_display *
wsi_wl_get_display(struct anv_physical_device *device,
                   struct wl_display *wl_display)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   pthread_mutex_lock(&wsi->mutex);

   struct hash_entry *entry = _mesa_hash_table_search(wsi->displays,
                                                      wl_display);
   if (!entry) {
      /* We're about to make a bunch of blocking calls.  Let's drop the
       * mutex for now so we don't block up too badly.
       */
      pthread_mutex_unlock(&wsi->mutex);

      struct wsi_wl_display *display = wsi_wl_display_create(wsi, wl_display);

      pthread_mutex_lock(&wsi->mutex);

      entry = _mesa_hash_table_search(wsi->displays, wl_display);
      if (entry) {
         /* Oops, someone raced us to it */
         if (display)
            wsi_wl_display_destroy(wsi, display);
      } else {
         entry = _mesa_hash_table_insert(wsi->displays, wl_display, display);
      }
   }

   pthread_mutex_unlock(&wsi->mutex);

   return entry->data;
}

VkBool32 anv_GetPhysicalDeviceWaylandPresentationSupportKHR(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    queueFamilyIndex,
    struct wl_display*                          display)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);

   return wsi_wl_get_display(physical_device, display) != NULL;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct anv_physical_device *device,
                           uint32_t queueFamilyIndex,
                           VkBool32* pSupported)
{
   *pSupported = true;

   return VK_SUCCESS;
}

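/* Only MAILBOX and FIFO are exposed; presentation always goes through the
 * compositor, which rules out a true IMMEDIATE mode on Wayland.
 */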
static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};

static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct anv_physical_device *device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   caps->minImageCount = MIN_NUM_IMAGES;
   caps->maxImageCount = 4;
   caps->currentExtent = (VkExtent2D) { -1, -1 };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) { INT16_MAX, INT16_MAX };
   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct anv_physical_device *device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wl_display *display =
      wsi_wl_get_display(device, surface->display);
   if (!display)
      return vk_error(VK_ERROR_SURFACE_LOST_KHR);

   uint32_t count = anv_vector_length(&display->formats);

   if (pSurfaceFormats == NULL) {
      *pSurfaceFormatCount = count;
      return VK_SUCCESS;
   }

   assert(*pSurfaceFormatCount >= count);
   *pSurfaceFormatCount = count;

   VkFormat *f;
   anv_vector_foreach(f, &display->formats) {
      *(pSurfaceFormats++) = (VkSurfaceFormatKHR) {
         .format = *f,
         /* TODO: We should get this from the compositor somehow */
         .colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
      };
   }

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 struct anv_physical_device *device,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   assert(*pPresentModeCount >= ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, ARRAY_SIZE(present_modes));
   *pPresentModeCount = ARRAY_SIZE(present_modes);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *surface,
                                struct anv_device *device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct anv_swapchain **swapchain);

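/* Application-side view of the entry point below (a sketch; wl_dpy and
 * wl_surf are assumed to come from the application's own Wayland setup):
 *
 *    VkWaylandSurfaceCreateInfoKHR info = {
 *       .sType = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
 *       .display = wl_dpy,
 *       .surface = wl_surf,
 *    };
 *    VkSurfaceKHR surface;
 *    vkCreateWaylandSurfaceKHR(instance, &info, NULL, &surface);
 */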
VkResult anv_CreateWaylandSurfaceKHR(
    VkInstance                                  _instance,
    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR);

   VkIcdSurfaceWayland *surface;

   surface = anv_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = _VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

struct wsi_wl_image {
   struct anv_image *image;
   struct anv_device_memory *memory;
   struct wl_buffer *buffer;
   bool busy;
};

struct wsi_wl_swapchain {
   struct anv_swapchain base;

   struct wsi_wl_display *display;
   struct wl_event_queue *queue;
   struct wl_surface *surface;

   VkExtent2D extent;
   VkFormat vk_format;
   uint32_t drm_format;

   VkPresentModeKHR present_mode;
   bool fifo_ready;

   uint32_t image_count;
   struct wsi_wl_image images[0];
};

static VkResult
wsi_wl_swapchain_get_images(struct anv_swapchain *anv_chain,
                            uint32_t *pCount, VkImage *pSwapchainImages)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   if (pSwapchainImages == NULL) {
      *pCount = chain->image_count;
      return VK_SUCCESS;
   }

   assert(chain->image_count <= *pCount);
   for (uint32_t i = 0; i < chain->image_count; i++)
      pSwapchainImages[i] = anv_image_to_handle(chain->images[i].image);

   *pCount = chain->image_count;

   return VK_SUCCESS;
}

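/* Find a swapchain image the compositor has released.  We poll the
 * swapchain's private event queue and fall back to blocking round-trips
 * until an image frees up; note that the timeout parameter is currently
 * ignored.
 */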
static VkResult
wsi_wl_swapchain_acquire_next_image(struct anv_swapchain *anv_chain,
                                    uint64_t timeout,
                                    VkSemaphore semaphore,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   int ret = wl_display_dispatch_queue_pending(chain->display->display,
                                               chain->queue);
   /* XXX: I'm not sure if out-of-date is the right error here.  If
    * wl_display_dispatch_queue_pending fails it most likely means we got
    * kicked by the server so this seems more-or-less correct.
    */
   if (ret < 0)
      return vk_error(VK_ERROR_OUT_OF_DATE_KHR);

   while (1) {
      for (uint32_t i = 0; i < chain->image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            return VK_SUCCESS;
         }
      }

      /* This time we do a blocking dispatch because we can't go
       * anywhere until we get an event.
       */
      int ret = wl_display_roundtrip_queue(chain->display->display,
                                           chain->queue);
      if (ret < 0)
         return vk_error(VK_ERROR_OUT_OF_DATE_KHR);
   }
}

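/* Frame callback used in FIFO mode: it fires once the compositor has
 * consumed the previously committed frame, which makes it safe to queue the
 * next one.
 */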
static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};

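/* Present an image: in FIFO mode, first block until the previous frame
 * callback has fired; then attach the wl_buffer, damage the whole surface,
 * commit, and mark the image busy until the compositor releases it.
 */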
static VkResult
wsi_wl_swapchain_queue_present(struct anv_swapchain *anv_chain,
                               struct anv_queue *queue,
                               uint32_t image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   if (chain->present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->display,
                                             chain->queue);
         if (ret < 0)
            return vk_error(VK_ERROR_OUT_OF_DATE_KHR);
      }
   }

   assert(image_index < chain->image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);
   wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);

   if (chain->present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      struct wl_callback *frame = wl_surface_frame(chain->surface);
      wl_proxy_set_queue((struct wl_proxy *)frame, chain->queue);
      wl_callback_add_listener(frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->display);

   return VK_SUCCESS;
}

static void
wsi_wl_image_finish(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image,
                    const VkAllocationCallbacks* pAllocator)
{
   VkDevice vk_device = anv_device_to_handle(chain->base.device);
   if (image->buffer)
      wl_buffer_destroy(image->buffer);
   anv_FreeMemory(vk_device, anv_device_memory_to_handle(image->memory),
                  pAllocator);
   anv_DestroyImage(vk_device, anv_image_to_handle(image->image),
                    pAllocator);
}

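/* wl_buffer.release: the compositor is done reading from this buffer, so
 * the image can be handed out again by acquire_next_image.
 */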
static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};

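/* Create one swapchain image: allocate an X-tiled anv_image and its backing
 * memory, export the GEM handle as a prime fd, and wrap it in a wl_buffer
 * via wl_drm.create_prime_buffer so the compositor can read from it.
 */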
static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image,
                  const VkAllocationCallbacks* pAllocator)
{
   VkDevice vk_device = anv_device_to_handle(chain->base.device);
   VkResult result;

   VkImage vk_image;
   result = anv_image_create(vk_device,
      &(struct anv_image_create_info) {
         .isl_tiling_flags = ISL_TILING_X_BIT,
         .stride = 0,
         .vk_info =
      &(VkImageCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
         .imageType = VK_IMAGE_TYPE_2D,
         .format = chain->vk_format,
         .extent = {
            .width = chain->extent.width,
            .height = chain->extent.height,
            .depth = 1
         },
         .mipLevels = 1,
         .arrayLayers = 1,
         .samples = 1,
         /* FIXME: Need a way to use X tiling to allow scanout */
         .tiling = VK_IMAGE_TILING_OPTIMAL,
         .usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
         .flags = 0,
      }},
      pAllocator,
      &vk_image);

   if (result != VK_SUCCESS)
      return result;

   image->image = anv_image_from_handle(vk_image);
   assert(vk_format_is_color(image->image->vk_format));

   struct anv_surface *surface = &image->image->color_surface;

   VkDeviceMemory vk_memory;
   result = anv_AllocateMemory(vk_device,
      &(VkMemoryAllocateInfo) {
         .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
         .allocationSize = image->image->size,
         .memoryTypeIndex = 0,
      },
      pAllocator,
      &vk_memory);

   if (result != VK_SUCCESS)
      goto fail_image;

   image->memory = anv_device_memory_from_handle(vk_memory);
   image->memory->bo.is_winsys_bo = true;

   result = anv_BindImageMemory(vk_device, vk_image, vk_memory, 0);

   if (result != VK_SUCCESS)
      goto fail_mem;

   int ret = anv_gem_set_tiling(chain->base.device,
                                image->memory->bo.gem_handle,
                                surface->isl.row_pitch, I915_TILING_X);
   if (ret) {
      /* FINISHME: Choose a better error. */
      result = vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto fail_mem;
   }

   int fd = anv_gem_handle_to_fd(chain->base.device,
                                 image->memory->bo.gem_handle);
   if (fd == -1) {
      /* FINISHME: Choose a better error. */
      result = vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto fail_mem;
   }

   image->buffer = wl_drm_create_prime_buffer(chain->display->drm,
                                              fd, /* name */
                                              chain->extent.width,
                                              chain->extent.height,
                                              chain->drm_format,
                                              surface->offset,
                                              surface->isl.row_pitch,
                                              0, 0, 0, 0 /* unused */);
   wl_display_roundtrip(chain->display->display);
   close(fd);

   wl_proxy_set_queue((struct wl_proxy *)image->buffer, chain->queue);
   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_mem:
   anv_FreeMemory(vk_device, vk_memory, pAllocator);
fail_image:
   anv_DestroyImage(vk_device, vk_image, pAllocator);

   return result;
}

static VkResult
wsi_wl_swapchain_destroy(struct anv_swapchain *anv_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   for (uint32_t i = 0; i < chain->image_count; i++) {
      if (chain->images[i].buffer)
         wsi_wl_image_finish(chain, &chain->images[i], pAllocator);
   }

   if (chain->queue)
      wl_event_queue_destroy(chain->queue);

   anv_free2(&chain->base.device->alloc, pAllocator, chain);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                struct anv_device *device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct anv_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   assert(num_images >= MIN_NUM_IMAGES);

   /* For true mailbox mode, we need at least 4 images:
    *  1) One to scan out from
    *  2) One to have queued for scan-out
    *  3) One to be currently held by the Wayland compositor
    *  4) One to render to
    */
   if (pCreateInfo->presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
      num_images = MAX2(num_images, 4);

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = anv_alloc2(&device->alloc, pAllocator, size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   chain->base.device = device;
   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_images = wsi_wl_swapchain_get_images;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;

   chain->surface = surface->surface;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, false);

   chain->present_mode = pCreateInfo->presentMode;
   chain->fifo_ready = true;

   chain->image_count = num_images;

   /* Mark a bunch of stuff as NULL.  This way we can just call
    * destroy_swapchain for cleanup.
    */
   for (uint32_t i = 0; i < chain->image_count; i++)
      chain->images[i].buffer = NULL;
   chain->queue = NULL;

   chain->display = wsi_wl_get_display(&device->instance->physicalDevice,
                                       surface->display);
   if (!chain->display) {
      result = vk_error(VK_ERROR_SURFACE_LOST_KHR);
      goto fail;
   }

   chain->queue = wl_display_create_queue(chain->display->display);
   if (!chain->queue) {
      result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail;
   }

   for (uint32_t i = 0; i < chain->image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i], pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

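/* Called once per physical device at init time: set up the Wayland WSI
 * interface vtable, the display cache, and the mutex that guards it.
 */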
VkResult
anv_wl_init_wsi(struct anv_physical_device *device)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = anv_alloc(&device->instance->alloc, sizeof(*wsi), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail;
   }

   wsi->physical_device = device;

   int ret = pthread_mutex_init(&wsi->mutex, NULL);
   if (ret != 0) {
      if (ret == ENOMEM) {
         result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      } else {
         /* FINISHME: Choose a better error. */
         result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      goto fail_alloc;
   }

   wsi->displays = _mesa_hash_table_create(NULL, _mesa_hash_pointer,
                                           _mesa_key_pointer_equal);
   if (!wsi->displays) {
      result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail_mutex;
   }

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities = wsi_wl_surface_get_capabilities;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail_mutex:
   pthread_mutex_destroy(&wsi->mutex);

fail_alloc:
   anv_free(&device->instance->alloc, wsi);
fail:
   device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
anv_wl_finish_wsi(struct anv_physical_device *device)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   if (wsi) {
      _mesa_hash_table_destroy(wsi->displays, NULL);

      pthread_mutex_destroy(&wsi->mutex);

      anv_free(&device->instance->alloc, wsi);
   }
}