/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <xcb/xcb.h>
#include <xcb/dri3.h>
#include <xcb/present.h>

#include "anv_wsi.h"

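/* An anv_wsi_surface backed by an X11 window: the XCB connection and window
 * ID are all we need to implement the VK_KHR_surface queries and to create
 * swapchains for it.
 */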
struct x11_surface {
   struct anv_wsi_surface base;

   xcb_connection_t *connection;
   xcb_window_t window;
};

static const VkSurfaceFormatKHR formats[] = {
   { .format = VK_FORMAT_B8G8R8A8_UNORM, },
};

static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
};

VkBool32 anv_GetPhysicalDeviceXcbPresentationSupportKHR(
    VkPhysicalDevice                            physicalDevice,
    uint32_t                                    queueFamilyIndex,
    xcb_connection_t*                           connection,
    xcb_visualid_t                              visual_id)
{
   anv_finishme("Check that we actually have DRI3");
   stub_return(true);
}

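/* VK_KHR_surface capability query.  The current window size comes from a
 * blocking xcb_get_geometry round trip; if that fails we report the
 * "extent unknown" values instead of an error.
 */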
static VkResult
x11_surface_get_capabilities(struct anv_wsi_surface *wsi_surface,
                             struct anv_physical_device *device,
                             VkSurfaceCapabilitiesKHR *caps)
{
   struct x11_surface *surface = (struct x11_surface *)wsi_surface;

   xcb_get_geometry_cookie_t cookie = xcb_get_geometry(surface->connection,
                                                       surface->window);
   xcb_generic_error_t *err;
   xcb_get_geometry_reply_t *geom = xcb_get_geometry_reply(surface->connection,
                                                           cookie, &err);
   if (geom) {
      VkExtent2D extent = { geom->width, geom->height };
      caps->currentExtent = extent;
      caps->minImageExtent = extent;
      caps->maxImageExtent = extent;
   } else {
      /* This can happen if the client didn't wait for the configure event
       * to come back from the compositor. In that case, we don't know the
       * size of the window so we just return valid "I don't know" stuff.
       */
      caps->currentExtent = (VkExtent2D) { -1, -1 };
      caps->minImageExtent = (VkExtent2D) { 1, 1 };
      caps->maxImageExtent = (VkExtent2D) { INT16_MAX, INT16_MAX };
   }
   free(err);
   free(geom);

   caps->minImageCount = 2;
   caps->maxImageCount = 4;
   caps->supportedTransforms = VK_SURFACE_TRANSFORM_NONE_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_NONE_BIT_KHR;
   caps->maxImageArrayLayers = 1;
   caps->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

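/* Standard Vulkan two-call enumeration: a NULL output array means "return
 * the count", otherwise the caller-provided array is filled in.
 */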
static VkResult
x11_surface_get_formats(struct anv_wsi_surface *surface,
                        struct anv_physical_device *device,
                        uint32_t *pSurfaceFormatCount,
                        VkSurfaceFormatKHR *pSurfaceFormats)
{
   if (pSurfaceFormats == NULL) {
      *pSurfaceFormatCount = ARRAY_SIZE(formats);
      return VK_SUCCESS;
   }

   assert(*pSurfaceFormatCount >= ARRAY_SIZE(formats));
   /* Copy only the entries we actually have; copying *pSurfaceFormatCount
    * elements would read past the end of formats[].
    */
   typed_memcpy(pSurfaceFormats, formats, ARRAY_SIZE(formats));
   *pSurfaceFormatCount = ARRAY_SIZE(formats);

   return VK_SUCCESS;
}

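/* Same two-call pattern as x11_surface_get_formats, for present modes. */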
static VkResult
x11_surface_get_present_modes(struct anv_wsi_surface *surface,
                              struct anv_physical_device *device,
                              uint32_t *pPresentModeCount,
                              VkPresentModeKHR *pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   assert(*pPresentModeCount >= ARRAY_SIZE(present_modes));
   /* As above, copy only the entries present_modes[] actually contains. */
   typed_memcpy(pPresentModes, present_modes, ARRAY_SIZE(present_modes));
   *pPresentModeCount = ARRAY_SIZE(present_modes);

   return VK_SUCCESS;
}

static void
x11_surface_destroy(struct anv_wsi_surface *surface,
                    const VkAllocationCallbacks *pAllocator)
{
   anv_free2(&surface->instance->alloc, pAllocator, surface);
}

static VkResult
x11_surface_create_swapchain(struct anv_wsi_surface *surface,
                             struct anv_device *device,
                             const VkSwapchainCreateInfoKHR* pCreateInfo,
                             const VkAllocationCallbacks* pAllocator,
                             struct anv_swapchain **swapchain);

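/* vkCreateXcbSurfaceKHR: allocate an x11_surface, remember the connection
 * and window, and hook up the per-surface function table used by the
 * generic anv WSI code.
 */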
VkResult anv_CreateXcbSurfaceKHR(
    VkInstance                                  _instance,
    xcb_connection_t*                           connection,
    xcb_window_t                                window,
    const VkAllocationCallbacks*                pAllocator,
    VkSurfaceKHR*                               pSurface)
{
   ANV_FROM_HANDLE(anv_instance, instance, _instance);
   struct x11_surface *surface;

   surface = anv_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   surface->connection = connection;
   surface->window = window;

   surface->base.instance = instance;
   surface->base.destroy = x11_surface_destroy;
   surface->base.get_capabilities = x11_surface_get_capabilities;
   surface->base.get_formats = x11_surface_get_formats;
   surface->base.get_present_modes = x11_surface_get_present_modes;
   surface->base.create_swapchain = x11_surface_create_swapchain;

   *pSurface = anv_wsi_surface_to_handle(&surface->base);

   return VK_SUCCESS;
}

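/* Per-swapchain-image state: the anv image and the memory backing it, the
 * DRI3 pixmap created from that memory, and a geometry cookie whose reply
 * tells us when a presented image can be reused.
 */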
struct x11_image {
   struct anv_image *image;
   struct anv_device_memory *memory;
   xcb_pixmap_t pixmap;
   xcb_get_geometry_cookie_t geom_cookie;
   bool busy;
};

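/* The swapchain itself: connection, target window, a GC for the present
 * copies, and a flexible array of image_count x11_images cycled through
 * round-robin via next_image.
 */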
struct x11_swapchain {
   struct anv_swapchain base;

   xcb_connection_t *conn;
   xcb_window_t window;
   xcb_gc_t gc;
   VkExtent2D extent;
   uint32_t image_count;
   uint32_t next_image;
   struct x11_image images[0];
};

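/* vkGetSwapchainImagesKHR: same count-then-fill pattern as the surface
 * queries above.
 */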
static VkResult
x11_get_images(struct anv_swapchain *anv_chain,
               uint32_t* pCount, VkImage *pSwapchainImages)
{
   struct x11_swapchain *chain = (struct x11_swapchain *)anv_chain;

   if (pSwapchainImages == NULL) {
      *pCount = chain->image_count;
      return VK_SUCCESS;
   }

   assert(chain->image_count <= *pCount);
   for (uint32_t i = 0; i < chain->image_count; i++)
      pSwapchainImages[i] = anv_image_to_handle(chain->images[i].image);

   *pCount = chain->image_count;

   return VK_SUCCESS;
}

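/* Hand out images in round-robin order.  A busy image is one that has been
 * presented but whose follow-up xcb_get_geometry reply hasn't been collected
 * yet; waiting for that reply both throttles us behind the X server and lets
 * us notice a window resize, which is reported as VK_ERROR_OUT_OF_DATE_KHR.
 * The timeout and semaphore parameters are not used yet.
 */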
static VkResult
x11_acquire_next_image(struct anv_swapchain *anv_chain,
                       uint64_t timeout,
                       VkSemaphore semaphore,
                       uint32_t *image_index)
{
   struct x11_swapchain *chain = (struct x11_swapchain *)anv_chain;
   struct x11_image *image = &chain->images[chain->next_image];

   if (image->busy) {
      xcb_generic_error_t *err;
      xcb_get_geometry_reply_t *geom =
         xcb_get_geometry_reply(chain->conn, image->geom_cookie, &err);
      if (!geom) {
         free(err);
         return vk_error(VK_ERROR_OUT_OF_DATE_KHR);
      }

      if (geom->width != chain->extent.width ||
          geom->height != chain->extent.height) {
         free(geom);
         return vk_error(VK_ERROR_OUT_OF_DATE_KHR);
      }
      free(geom);

      image->busy = false;
   }

   *image_index = chain->next_image;
   chain->next_image = (chain->next_image + 1) % chain->image_count;
   return VK_SUCCESS;
}

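/* Present by copying the image's DRI3 pixmap onto the window with
 * xcb_copy_area, then queue a geometry request that acquire uses as a
 * completion fence for this image.
 */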
static VkResult
x11_queue_present(struct anv_swapchain *anv_chain,
                  struct anv_queue *queue,
                  uint32_t image_index)
{
   struct x11_swapchain *chain = (struct x11_swapchain *)anv_chain;

   /* Validate the index before using it to address the image array. */
   assert(image_index < chain->image_count);

   struct x11_image *image = &chain->images[image_index];

   xcb_void_cookie_t cookie;

   cookie = xcb_copy_area(chain->conn,
                          image->pixmap,
                          chain->window,
                          chain->gc,
                          0, 0,
                          0, 0,
                          chain->extent.width,
                          chain->extent.height);
   xcb_discard_reply(chain->conn, cookie.sequence);

   image->geom_cookie = xcb_get_geometry(chain->conn, chain->window);
   image->busy = true;

   xcb_flush(chain->conn);

   return VK_SUCCESS;
}

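/* Tear down the swapchain: discard any outstanding geometry replies, free
 * the pixmaps, then release the chain itself.  The images and their memory
 * are still leaked (see the TODO below).
 */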
static VkResult
x11_swapchain_destroy(struct anv_swapchain *anv_chain,
                      const VkAllocationCallbacks *pAllocator)
{
   struct x11_swapchain *chain = (struct x11_swapchain *)anv_chain;
   xcb_void_cookie_t cookie;

   for (uint32_t i = 0; i < chain->image_count; i++) {
      struct x11_image *image = &chain->images[i];

      if (image->busy)
         xcb_discard_reply(chain->conn, image->geom_cookie.sequence);

      cookie = xcb_free_pixmap(chain->conn, image->pixmap);
      xcb_discard_reply(chain->conn, cookie.sequence);

      /* TODO: Delete images and free memory */
   }

   /* Free the chain with the same allocators it was created with. */
   anv_free2(&chain->base.device->alloc, pAllocator, chain);

   return VK_SUCCESS;
}

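/* Build the swapchain: for each image, create an X-tiled anv_image with the
 * requested format and extent, allocate and bind device memory for it, set
 * the BO's tiling, export it as a prime fd, and import that fd as a DRI3
 * pixmap.  Finally create the GC that x11_queue_present uses for its copies.
 */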
static VkResult
x11_surface_create_swapchain(struct anv_wsi_surface *wsi_surface,
                             struct anv_device *device,
                             const VkSwapchainCreateInfoKHR *pCreateInfo,
                             const VkAllocationCallbacks* pAllocator,
                             struct anv_swapchain **swapchain_out)
{
   struct x11_surface *surface = (struct x11_surface *)wsi_surface;
   struct x11_swapchain *chain;
   xcb_void_cookie_t cookie;
   VkResult result;

   int num_images = pCreateInfo->minImageCount;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = anv_alloc2(&device->alloc, pAllocator, size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   chain->base.device = device;
   chain->base.destroy = x11_swapchain_destroy;
   chain->base.get_images = x11_get_images;
   chain->base.acquire_next_image = x11_acquire_next_image;
   chain->base.queue_present = x11_queue_present;

   chain->conn = surface->connection;
   chain->window = surface->window;
   chain->extent = pCreateInfo->imageExtent;
   chain->image_count = num_images;
   chain->next_image = 0;

   for (uint32_t i = 0; i < chain->image_count; i++) {
      VkDeviceMemory memory_h;
      VkImage image_h;
      struct anv_image *image;
      struct anv_surface *surface;
      struct anv_device_memory *memory;

      anv_image_create(anv_device_to_handle(device),
         &(struct anv_image_create_info) {
            .isl_tiling_flags = ISL_TILING_X_BIT,
            .stride = 0,
            .vk_info =
         &(VkImageCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .imageType = VK_IMAGE_TYPE_2D,
            .format = pCreateInfo->imageFormat,
            .extent = {
               .width = pCreateInfo->imageExtent.width,
               .height = pCreateInfo->imageExtent.height,
               .depth = 1
            },
            .mipLevels = 1,
            .arrayLayers = 1,
            .samples = 1,
            /* FIXME: Need a way to use X tiling to allow scanout */
            .tiling = VK_IMAGE_TILING_OPTIMAL,
            .usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
            .flags = 0,
         }},
         NULL,
         &image_h);

      image = anv_image_from_handle(image_h);
      assert(anv_format_is_color(image->format));

      surface = &image->color_surface;

      anv_AllocateMemory(anv_device_to_handle(device),
         &(VkMemoryAllocateInfo) {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .allocationSize = image->size,
            .memoryTypeIndex = 0,
         },
         NULL /* XXX: pAllocator */,
         &memory_h);

      memory = anv_device_memory_from_handle(memory_h);

      anv_BindImageMemory(VK_NULL_HANDLE, anv_image_to_handle(image),
                          memory_h, 0);

      int ret = anv_gem_set_tiling(device, memory->bo.gem_handle,
                                   surface->isl.row_pitch, I915_TILING_X);
      if (ret) {
         /* FINISHME: Choose a better error. */
         result = vk_errorf(VK_ERROR_OUT_OF_DEVICE_MEMORY,
                            "set_tiling failed: %m");
         goto fail;
      }

      int fd = anv_gem_handle_to_fd(device, memory->bo.gem_handle);
      if (fd == -1) {
         /* FINISHME: Choose a better error. */
         result = vk_errorf(VK_ERROR_OUT_OF_DEVICE_MEMORY,
                            "handle_to_fd failed: %m");
         goto fail;
      }

      uint32_t bpp = 32;
      uint32_t depth = 24;
      xcb_pixmap_t pixmap = xcb_generate_id(chain->conn);

      cookie =
         xcb_dri3_pixmap_from_buffer_checked(chain->conn,
                                             pixmap,
                                             chain->window,
                                             image->size,
                                             pCreateInfo->imageExtent.width,
                                             pCreateInfo->imageExtent.height,
                                             surface->isl.row_pitch,
                                             depth, bpp, fd);

      chain->images[i].image = image;
      chain->images[i].memory = memory;
      chain->images[i].pixmap = pixmap;
      chain->images[i].busy = false;

      xcb_discard_reply(chain->conn, cookie.sequence);
   }

   chain->gc = xcb_generate_id(chain->conn);
   if (!chain->gc) {
      /* FINISHME: Choose a better error. */
      result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
      goto fail;
   }

   cookie = xcb_create_gc(chain->conn,
                          chain->gc,
                          chain->window,
                          XCB_GC_GRAPHICS_EXPOSURES,
                          (uint32_t []) { 0 });
   xcb_discard_reply(chain->conn, cookie.sequence);

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   /* FINISHME: Also release any images and memory created before the
    * failure; for now only the swapchain allocation itself is freed.
    */
   anv_free2(&device->alloc, pAllocator, chain);
   return result;
}

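/* No global X11 WSI state to set up or tear down yet. */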
VkResult
anv_x11_init_wsi(struct anv_instance *instance)
{
   return VK_SUCCESS;
}

void
anv_x11_finish_wsi(struct anv_instance *instance)
{ }