anv: move to using vk_alloc helpers.
[mesa.git] src/intel/vulkan/anv_wsi_wayland.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>
#include <wayland-drm-client-protocol.h>

#include "anv_wsi.h"

#include "vk_format_info.h"
#include <util/hash_table.h>

#define MIN_NUM_IMAGES 2

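/* Per-connection state cached for each wl_display: the VkFormats usable on
 * this display and the wl_drm capability bits advertised by the compositor.
 */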
struct wsi_wl_display {
   struct anv_physical_device *                 physical_device;
   struct wl_display *                          display;
   struct wl_drm *                              drm;

   /* Vector of VkFormats supported */
   struct u_vector                              formats;

   uint32_t                                     capabilities;
};

struct wsi_wayland {
   struct anv_wsi_interface                     base;

   struct anv_physical_device *                 physical_device;

   pthread_mutex_t                              mutex;
   /* Hash table of wl_display -> wsi_wl_display mappings */
   struct hash_table *                          displays;
};

static void
wsi_wl_display_add_vk_format(struct wsi_wl_display *display, VkFormat format)
{
   /* Don't add a format that's already in the list */
   VkFormat *f;
   u_vector_foreach(f, &display->formats)
      if (*f == format)
         return;

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;
   anv_GetPhysicalDeviceFormatProperties(
      anv_physical_device_to_handle(display->physical_device), format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return;

   f = u_vector_add(&display->formats);
   if (f)
      *f = format;
}

static void
drm_handle_device(void *data, struct wl_drm *drm, const char *name)
{
   fprintf(stderr, "wl_drm.device(%s)\n", name);
}

static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   /* TODO: Figure out what all the formats mean and make this table
    * correct.
    */
#if 0
   case VK_FORMAT_R4G4B4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR4444 : WL_DRM_FORMAT_XBGR4444;
   case VK_FORMAT_R5G6B5_UNORM:
      return WL_DRM_FORMAT_BGR565;
   case VK_FORMAT_R5G5B5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR1555 : WL_DRM_FORMAT_XBGR1555;
   case VK_FORMAT_R8G8B8_UNORM:
      return WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR8888 : WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R10G10B10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ABGR2101010 : WL_DRM_FORMAT_XBGR2101010;
   case VK_FORMAT_B4G4R4A4_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB4444 : WL_DRM_FORMAT_XRGB4444;
   case VK_FORMAT_B5G6R5_UNORM:
      return WL_DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G5R5A1_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB1555 : WL_DRM_FORMAT_XRGB1555;
#endif
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return WL_DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ARGB8888 : WL_DRM_FORMAT_XRGB8888;
#if 0
   case VK_FORMAT_B10G10R10A2_UNORM:
      return alpha ? WL_DRM_FORMAT_ARGB2101010 : WL_DRM_FORMAT_XRGB2101010;
#endif

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

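/* wl_drm format events: translate each DRM format the compositor advertises
 * into the VkFormat(s) we can expose for it on this display.
 */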
static void
drm_handle_format(void *data, struct wl_drm *drm, uint32_t wl_format)
{
   struct wsi_wl_display *display = data;

   switch (wl_format) {
#if 0
   case WL_DRM_FORMAT_ABGR4444:
   case WL_DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R4G4B4A4_UNORM);
      break;
   case WL_DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G6B5_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR1555:
   case WL_DRM_FORMAT_XBGR1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R5G5B5A1_UNORM);
      break;
   case WL_DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_DRM_FORMAT_ABGR2101010:
   case WL_DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_R10G10B10A2_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB4444:
   case WL_DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B4G4R4A4_UNORM);
      break;
   case WL_DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G6R5_UNORM);
      break;
   case WL_DRM_FORMAT_ARGB1555:
   case WL_DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B5G5R5A1_UNORM);
      break;
#endif
   case WL_DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_SRGB);
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8_UNORM);
      /* fallthrough */
   case WL_DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_SRGB);
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B8G8R8A8_UNORM);
      break;
#if 0
   case WL_DRM_FORMAT_ARGB2101010:
   case WL_DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format(display, VK_FORMAT_B10G10R10A2_UNORM);
      break;
#endif
   }
}

static void
drm_handle_authenticated(void *data, struct wl_drm *drm)
{
}

static void
drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t capabilities)
{
   struct wsi_wl_display *display = data;

   display->capabilities = capabilities;
}

static const struct wl_drm_listener drm_listener = {
   drm_handle_device,
   drm_handle_format,
   drm_handle_authenticated,
   drm_handle_capabilities,
};

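/* The only global we care about is wl_drm.  We bind it at version 2, which
 * is what provides the capabilities event and prime buffer support that the
 * rest of this file relies on.
 */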
static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (strcmp(interface, "wl_drm") == 0) {
      assert(display->drm == NULL);

      assert(version >= 2);
      display->drm = wl_registry_bind(registry, name, &wl_drm_interface, 2);

      if (display->drm)
         wl_drm_add_listener(display->drm, &drm_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

static void
wsi_wl_display_destroy(struct wsi_wayland *wsi, struct wsi_wl_display *display)
{
   u_vector_finish(&display->formats);
   if (display->drm)
      wl_drm_destroy(display->drm);
   vk_free(&wsi->physical_device->instance->alloc, display);
}

static struct wsi_wl_display *
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display)
{
   struct wsi_wl_display *display =
      vk_alloc(&wsi->physical_device->instance->alloc, sizeof(*display), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return NULL;

   memset(display, 0, sizeof(*display));

   display->display = wl_display;
   display->physical_device = wsi->physical_device;

   /* NULL-initialize the registry so the fail path below can check it safely
    * even when we bail out before it has been created.
    */
   struct wl_registry *registry = NULL;

   if (!u_vector_init(&display->formats, sizeof(VkFormat), 8))
      goto fail;

   registry = wl_display_get_registry(wl_display);
   if (!registry)
      goto fail;

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get the wl_drm global */
   wl_display_roundtrip(wl_display);

   if (!display->drm)
      goto fail;

   /* Round-trip to get wl_drm formats and capabilities */
   wl_display_roundtrip(wl_display);

   /* We need prime support */
   if (!(display->capabilities & WL_DRM_CAPABILITY_PRIME))
      goto fail;

   /* We don't need this anymore */
   wl_registry_destroy(registry);

   return display;

fail:
   if (registry)
      wl_registry_destroy(registry);

   wsi_wl_display_destroy(wsi, display);
   return NULL;
}

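/* Look up (or lazily create) the wsi_wl_display for a wl_display.  Creating
 * one involves blocking round-trips, so the wsi mutex is dropped while the
 * new display is built and the table is re-checked afterwards in case
 * another thread won the race.
 */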
static struct wsi_wl_display *
wsi_wl_get_display(struct anv_physical_device *device,
                   struct wl_display *wl_display)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   pthread_mutex_lock(&wsi->mutex);

   struct hash_entry *entry = _mesa_hash_table_search(wsi->displays,
                                                      wl_display);
   if (!entry) {
      /* We're about to make a bunch of blocking calls.  Let's drop the
       * mutex for now so we don't block up too badly.
       */
      pthread_mutex_unlock(&wsi->mutex);

      struct wsi_wl_display *display = wsi_wl_display_create(wsi, wl_display);

      pthread_mutex_lock(&wsi->mutex);

      entry = _mesa_hash_table_search(wsi->displays, wl_display);
      if (entry) {
         /* Oops, someone raced us to it */
         wsi_wl_display_destroy(wsi, display);
      } else {
         entry = _mesa_hash_table_insert(wsi->displays, wl_display, display);
      }
   }

   pthread_mutex_unlock(&wsi->mutex);

   return entry->data;
}

VkBool32 anv_GetPhysicalDeviceWaylandPresentationSupportKHR(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    queueFamilyIndex,
    struct wl_display*                          display)
{
   ANV_FROM_HANDLE(anv_physical_device, physical_device, physicalDevice);

   return wsi_wl_get_display(physical_device, display) != NULL;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct anv_physical_device *device,
                           uint32_t queueFamilyIndex,
                           VkBool32* pSupported)
{
   *pSupported = true;

   return VK_SUCCESS;
}

static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};

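/* A currentExtent of (0xFFFFFFFF, 0xFFFFFFFF) (written as -1 below) tells the
 * application that the surface size will be determined by the extent of the
 * swapchain it creates, which is the usual situation for Wayland surfaces.
 */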
static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct anv_physical_device *device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   caps->minImageCount = MIN_NUM_IMAGES;
   caps->maxImageCount = 4;
   caps->currentExtent = (VkExtent2D) { -1, -1 };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) { INT16_MAX, INT16_MAX };
   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct anv_physical_device *device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wl_display *display =
      wsi_wl_get_display(device, surface->display);

   uint32_t count = u_vector_length(&display->formats);

   if (pSurfaceFormats == NULL) {
      *pSurfaceFormatCount = count;
      return VK_SUCCESS;
   }

   assert(*pSurfaceFormatCount >= count);
   *pSurfaceFormatCount = count;

   VkFormat *f;
   u_vector_foreach(f, &display->formats) {
      *(pSurfaceFormats++) = (VkSurfaceFormatKHR) {
         .format = *f,
         /* TODO: We should get this from the compositor somehow */
         .colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
      };
   }

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 struct anv_physical_device *device,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   assert(*pPresentModeCount >= ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, ARRAY_SIZE(present_modes));
   *pPresentModeCount = ARRAY_SIZE(present_modes);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *surface,
                                struct anv_device *device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct anv_swapchain **swapchain);

VkResult anv_CreateWaylandSurfaceKHR(
    VkInstance                                  _instance,
    const VkWaylandSurfaceCreateInfoKHR*        pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR);

   VkIcdSurfaceWayland *surface;

   surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = _VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

struct wsi_wl_image {
   struct anv_image *                           image;
   struct anv_device_memory *                   memory;
   struct wl_buffer *                           buffer;
   bool                                         busy;
};

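/* Swapchain state.  The trailing images[0] array is sized to image_count at
 * allocation time (see wsi_wl_surface_create_swapchain).
 */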
struct wsi_wl_swapchain {
   struct anv_swapchain                         base;

   struct wsi_wl_display *                      display;
   struct wl_event_queue *                      queue;
   struct wl_surface *                          surface;

   VkExtent2D                                   extent;
   VkFormat                                     vk_format;
   uint32_t                                     drm_format;

   VkPresentModeKHR                             present_mode;
   bool                                         fifo_ready;

   uint32_t                                     image_count;
   struct wsi_wl_image                          images[0];
};

static VkResult
wsi_wl_swapchain_get_images(struct anv_swapchain *anv_chain,
                            uint32_t *pCount, VkImage *pSwapchainImages)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   if (pSwapchainImages == NULL) {
      *pCount = chain->image_count;
      return VK_SUCCESS;
   }

   assert(chain->image_count <= *pCount);
   for (uint32_t i = 0; i < chain->image_count; i++)
      pSwapchainImages[i] = anv_image_to_handle(chain->images[i].image);

   *pCount = chain->image_count;

   return VK_SUCCESS;
}

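/* Acquire works by polling: dispatch any pending buffer-release events, then
 * scan for an image the compositor is no longer holding.  If every image is
 * still busy, block on a round-trip and try again.  Note that the timeout
 * and semaphore parameters are currently ignored.
 */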
static VkResult
wsi_wl_swapchain_acquire_next_image(struct anv_swapchain *anv_chain,
                                    uint64_t timeout,
                                    VkSemaphore semaphore,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   int ret = wl_display_dispatch_queue_pending(chain->display->display,
                                               chain->queue);
   /* XXX: I'm not sure if out-of-date is the right error here.  If
    * wl_display_dispatch_queue_pending fails it most likely means we got
    * kicked by the server so this seems more-or-less correct.
    */
   if (ret < 0)
      return vk_error(VK_ERROR_OUT_OF_DATE_KHR);

   while (1) {
      for (uint32_t i = 0; i < chain->image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            chain->images[i].busy = true;
            return VK_SUCCESS;
         }
      }

      /* This time we do a blocking dispatch because we can't go
       * anywhere until we get an event.
       */
      int ret = wl_display_roundtrip_queue(chain->display->display,
                                           chain->queue);
      if (ret < 0)
         return vk_error(VK_ERROR_OUT_OF_DATE_KHR);
   }
}

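/* wl_surface.frame callback: the compositor is ready for a new frame, so the
 * next FIFO present may proceed.
 */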
static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};

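/* Present: in FIFO mode, first wait for the previous frame callback so we
 * never outrun the compositor, then attach and commit the buffer and request
 * a new frame callback.  In mailbox mode the buffer is committed immediately.
 */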
static VkResult
wsi_wl_swapchain_queue_present(struct anv_swapchain *anv_chain,
                               struct anv_queue *queue,
                               uint32_t image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   if (chain->present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->display,
                                             chain->queue);
         if (ret < 0)
            return vk_error(VK_ERROR_OUT_OF_DATE_KHR);
      }
   }

   assert(image_index < chain->image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);
   wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);

   if (chain->present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      struct wl_callback *frame = wl_surface_frame(chain->surface);
      wl_proxy_set_queue((struct wl_proxy *)frame, chain->queue);
      wl_callback_add_listener(frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->display);

   return VK_SUCCESS;
}

static void
wsi_wl_image_finish(struct wsi_wl_swapchain *chain, struct wsi_wl_image *image,
                    const VkAllocationCallbacks* pAllocator)
{
   VkDevice vk_device = anv_device_to_handle(chain->base.device);
   anv_FreeMemory(vk_device, anv_device_memory_to_handle(image->memory),
                  pAllocator);
   anv_DestroyImage(vk_device, anv_image_to_handle(image->image),
                    pAllocator);
}

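/* wl_buffer.release: the compositor is done reading this buffer, so the
 * image can be handed out again by acquire_next_image.
 */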
static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};

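/* Create one swapchain image: an X-tiled anv_image with its own memory
 * allocation, exported as a prime fd and wrapped in a wl_buffer via
 * wl_drm.create_prime_buffer.
 */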
static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain,
                  struct wsi_wl_image *image,
                  const VkSwapchainCreateInfoKHR *pCreateInfo,
                  const VkAllocationCallbacks* pAllocator)
{
   VkDevice vk_device = anv_device_to_handle(chain->base.device);
   VkResult result;

   VkImage vk_image;
   result = anv_image_create(vk_device,
      &(struct anv_image_create_info) {
         .isl_tiling_flags = ISL_TILING_X_BIT,
         .stride = 0,
         .vk_info =
      &(VkImageCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
         .imageType = VK_IMAGE_TYPE_2D,
         .format = chain->vk_format,
         .extent = {
            .width = chain->extent.width,
            .height = chain->extent.height,
            .depth = 1
         },
         .mipLevels = 1,
         .arrayLayers = 1,
         .samples = 1,
         /* FIXME: Need a way to use X tiling to allow scanout */
         .tiling = VK_IMAGE_TILING_OPTIMAL,
         .usage = (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                   pCreateInfo->imageUsage),
         .flags = 0,
      }},
      pAllocator,
      &vk_image);

   if (result != VK_SUCCESS)
      return result;

   image->image = anv_image_from_handle(vk_image);
   assert(vk_format_is_color(image->image->vk_format));

   struct anv_surface *surface = &image->image->color_surface;

   VkDeviceMemory vk_memory;
   result = anv_AllocateMemory(vk_device,
      &(VkMemoryAllocateInfo) {
         .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
         .allocationSize = image->image->size,
         .memoryTypeIndex = 0,
      },
      pAllocator,
      &vk_memory);

   if (result != VK_SUCCESS)
      goto fail_image;

   image->memory = anv_device_memory_from_handle(vk_memory);
   image->memory->bo.is_winsys_bo = true;

   result = anv_BindImageMemory(vk_device, vk_image, vk_memory, 0);

   if (result != VK_SUCCESS)
      goto fail_mem;

   int ret = anv_gem_set_tiling(chain->base.device,
                                image->memory->bo.gem_handle,
                                surface->isl.row_pitch, I915_TILING_X);
   if (ret) {
      /* FINISHME: Choose a better error. */
      result = vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto fail_mem;
   }

   int fd = anv_gem_handle_to_fd(chain->base.device,
                                 image->memory->bo.gem_handle);
   if (fd == -1) {
      /* FINISHME: Choose a better error. */
      result = vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
      goto fail_mem;
   }

   image->buffer = wl_drm_create_prime_buffer(chain->display->drm,
                                              fd, /* prime fd */
                                              chain->extent.width,
                                              chain->extent.height,
                                              chain->drm_format,
                                              surface->offset,
                                              surface->isl.row_pitch,
                                              0, 0, 0, 0 /* unused */);
   wl_display_roundtrip(chain->display->display);
   close(fd);

   wl_proxy_set_queue((struct wl_proxy *)image->buffer, chain->queue);
   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_mem:
   anv_FreeMemory(vk_device, vk_memory, pAllocator);
fail_image:
   anv_DestroyImage(vk_device, vk_image, pAllocator);

   return result;
}

static VkResult
wsi_wl_swapchain_destroy(struct anv_swapchain *anv_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)anv_chain;

   for (uint32_t i = 0; i < chain->image_count; i++) {
      if (chain->images[i].buffer)
         wsi_wl_image_finish(chain, &chain->images[i], pAllocator);
   }

   vk_free2(&chain->base.device->alloc, pAllocator, chain);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                struct anv_device *device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct anv_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   assert(num_images >= MIN_NUM_IMAGES);

   /* For true mailbox mode, we need at least 4 images:
    *  1) One to scan out from
    *  2) One to have queued for scan-out
    *  3) One to be currently held by the Wayland compositor
    *  4) One to render to
    */
   if (pCreateInfo->presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
      num_images = MAX2(num_images, 4);

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = vk_alloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   chain->base.device = device;
   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_images = wsi_wl_swapchain_get_images;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;

   chain->surface = surface->surface;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, false);

   chain->present_mode = pCreateInfo->presentMode;
   chain->fifo_ready = true;

   chain->image_count = num_images;

   /* Mark a bunch of stuff as NULL.  This way we can just call
    * destroy_swapchain for cleanup.
    */
   for (uint32_t i = 0; i < chain->image_count; i++)
      chain->images[i].buffer = NULL;
   chain->queue = NULL;

   chain->display = wsi_wl_get_display(&device->instance->physicalDevice,
                                       surface->display);
   if (!chain->display) {
      result = vk_error(VK_ERROR_INITIALIZATION_FAILED);
      goto fail;
   }

   chain->queue = wl_display_create_queue(chain->display->display);
   if (!chain->queue) {
      result = vk_error(VK_ERROR_INITIALIZATION_FAILED);
      goto fail;
   }

   for (uint32_t i = 0; i < chain->image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i],
                                 pCreateInfo, pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

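/* Register the Wayland WSI backend with the physical device: set up the
 * display cache (hash table plus mutex) and install the surface callbacks.
 */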
VkResult
anv_wl_init_wsi(struct anv_physical_device *device)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = vk_alloc(&device->instance->alloc, sizeof(*wsi), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail;
   }

   wsi->physical_device = device;

   int ret = pthread_mutex_init(&wsi->mutex, NULL);
   if (ret != 0) {
      if (ret == ENOMEM) {
         result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      } else {
         /* FINISHME: Choose a better error. */
         result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      goto fail_alloc;
   }

   wsi->displays = _mesa_hash_table_create(NULL, _mesa_hash_pointer,
                                           _mesa_key_pointer_equal);
   if (!wsi->displays) {
      result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail_mutex;
   }

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities = wsi_wl_surface_get_capabilities;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail_mutex:
   pthread_mutex_destroy(&wsi->mutex);

fail_alloc:
   vk_free(&device->instance->alloc, wsi);
fail:
   device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
anv_wl_finish_wsi(struct anv_physical_device *device)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   if (wsi) {
      _mesa_hash_table_destroy(wsi->displays, NULL);

      pthread_mutex_destroy(&wsi->mutex);

      vk_free(&device->instance->alloc, wsi);
   }
}