/* src/loader/loader_dri3_helper.c */
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27 #include <string.h>
28
29 #include <X11/xshmfence.h>
30 #include <xcb/xcb.h>
31 #include <xcb/dri3.h>
32 #include <xcb/present.h>
33
34 #include <X11/Xlib-xcb.h>
35
36 #include "loader_dri3_helper.h"
37 #include "util/macros.h"
38 #include "drm_fourcc.h"
39
40 /* From xmlpool/options.h; these are user-exposed so should be stable */
41 #define DRI_CONF_VBLANK_NEVER 0
42 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
43 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
44 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
45
46 /**
47 * A cached blit context.
48 */
49 struct loader_dri3_blit_context {
50 mtx_t mtx;
51 __DRIcontext *ctx;
52 __DRIscreen *cur_screen;
53 const __DRIcoreExtension *core;
54 };
55
56 /* For simplicity we maintain the cache only for a single screen at a time */
57 static struct loader_dri3_blit_context blit_context = {
58 _MTX_INITIALIZER_NP, NULL
59 };
60
61 static void
62 dri3_flush_present_events(struct loader_dri3_drawable *draw);
63
64 static struct loader_dri3_buffer *
65 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
66
67 /**
68 * Do we have blit functionality in the image blit extension?
69 *
70 * \param draw[in] The drawable intended to blit from / to.
71  * \return true if we have blit functionality, false otherwise.
72 */
73 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
74 {
75 return draw->ext->image->base.version >= 9 &&
76 draw->ext->image->blitImage != NULL;
77 }
78
79 /**
80 * Get and lock (for use with the current thread) a dri context associated
81 * with the drawable's dri screen. The context is intended to be used with
82 * the dri image extension's blitImage method.
83 *
84 * \param draw[in] Pointer to the drawable whose dri screen we want a
85 * dri context for.
86 * \return A dri context or NULL if context creation failed.
87 *
88 * When the caller is done with the context (even if the context returned was
89 * NULL), the caller must call loader_dri3_blit_context_put.
90 */
91 static __DRIcontext *
92 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
93 {
94 mtx_lock(&blit_context.mtx);
95
96 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
97 blit_context.core->destroyContext(blit_context.ctx);
98 blit_context.ctx = NULL;
99 }
100
101 if (!blit_context.ctx) {
102 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
103 NULL, NULL, NULL);
104 blit_context.cur_screen = draw->dri_screen;
105 blit_context.core = draw->ext->core;
106 }
107
108 return blit_context.ctx;
109 }
110
111 /**
112 * Release (for use with other threads) a dri context previously obtained using
113 * loader_dri3_blit_context_get.
114 */
115 static void
116 loader_dri3_blit_context_put(void)
117 {
118 mtx_unlock(&blit_context.mtx);
119 }
120
121 /**
122  * Blit (parts of) the contents of a DRI image to another DRI image.
123 *
124 * \param draw[in] The drawable which owns the images.
125 * \param dst[in] The destination image.
126 * \param src[in] The source image.
127 * \param dstx0[in] Start destination coordinate.
128 * \param dsty0[in] Start destination coordinate.
129 * \param width[in] Blit width.
130 * \param height[in] Blit height.
131 * \param srcx0[in] Start source coordinate.
132 * \param srcy0[in] Start source coordinate.
133 * \param flush_flag[in] Image blit flush flag.
134 * \return true iff successful.
135 */
136 static bool
137 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
138 __DRIimage *dst, __DRIimage *src,
139 int dstx0, int dsty0, int width, int height,
140 int srcx0, int srcy0, int flush_flag)
141 {
142 __DRIcontext *dri_context;
143 bool use_blit_context = false;
144
145 if (!loader_dri3_have_image_blit(draw))
146 return false;
147
148 dri_context = draw->vtable->get_dri_context(draw);
149
150 if (!dri_context || !draw->vtable->in_current_context(draw)) {
151 dri_context = loader_dri3_blit_context_get(draw);
152 use_blit_context = true;
153 flush_flag |= __BLIT_FLAG_FLUSH;
154 }
155
156 if (dri_context)
157 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
158 width, height, srcx0, srcy0,
159 width, height, flush_flag);
160
161 if (use_blit_context)
162 loader_dri3_blit_context_put();
163
164 return dri_context != NULL;
165 }
166
167 static inline void
168 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
169 {
170 xshmfence_reset(buffer->shm_fence);
171 }
172
173 static inline void
174 dri3_fence_set(struct loader_dri3_buffer *buffer)
175 {
176 xshmfence_trigger(buffer->shm_fence);
177 }
178
179 static inline void
180 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
181 {
182 xcb_sync_trigger_fence(c, buffer->sync_fence);
183 }
184
185 static inline void
186 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
187 struct loader_dri3_buffer *buffer)
188 {
189 xcb_flush(c);
190 xshmfence_await(buffer->shm_fence);
191 if (draw) {
192 mtx_lock(&draw->mtx);
193 dri3_flush_present_events(draw);
194 mtx_unlock(&draw->mtx);
195 }
196 }
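/* Illustrative sketch (an assumption-level example, not an additional code
 * path in this file): the fence helpers above are normally used in a
 * reset -> server-side work -> trigger -> await sequence whenever the X
 * server writes to one of our buffers; src, buf, w and h are placeholders:
 *
 *    dri3_fence_reset(draw->conn, buf);           // mark buf busy
 *    dri3_copy_area(draw->conn, src, buf->pixmap,
 *                   dri3_drawable_gc(draw),
 *                   0, 0, 0, 0, w, h);            // queue server-side copy
 *    dri3_fence_trigger(draw->conn, buf);         // server signals afterwards
 *    dri3_fence_await(draw->conn, draw, buf);     // block until signalled
 *
 * loader_dri3_copy_drawable() below follows essentially this pattern.
 */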
197
198 static void
199 dri3_update_num_back(struct loader_dri3_drawable *draw)
200 {
201 if (draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP)
202 draw->num_back = 3;
203 else
204 draw->num_back = 2;
205 }
206
207 void
208 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
209 {
210 draw->swap_interval = interval;
211 }
212
213 /** dri3_free_render_buffer
214 *
215  * Free everything associated with one render buffer: the pixmap, the sync
216  * and shm fences, and the driver image(s)
217 */
218 static void
219 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
220 struct loader_dri3_buffer *buffer)
221 {
222 if (buffer->own_pixmap)
223 xcb_free_pixmap(draw->conn, buffer->pixmap);
224 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
225 xshmfence_unmap_shm(buffer->shm_fence);
226 draw->ext->image->destroyImage(buffer->image);
227 if (buffer->linear_buffer)
228 draw->ext->image->destroyImage(buffer->linear_buffer);
229 free(buffer);
230 }
231
232 void
233 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
234 {
235 int i;
236
237 draw->ext->core->destroyDrawable(draw->dri_drawable);
238
239 for (i = 0; i < ARRAY_SIZE(draw->buffers); i++) {
240 if (draw->buffers[i])
241 dri3_free_render_buffer(draw, draw->buffers[i]);
242 }
243
244 if (draw->special_event) {
245 xcb_void_cookie_t cookie =
246 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
247 XCB_PRESENT_EVENT_MASK_NO_EVENT);
248
249 xcb_discard_reply(draw->conn, cookie.sequence);
250 xcb_unregister_for_special_event(draw->conn, draw->special_event);
251 }
252
253 cnd_destroy(&draw->event_cnd);
254 mtx_destroy(&draw->mtx);
255 }
256
257 int
258 loader_dri3_drawable_init(xcb_connection_t *conn,
259 xcb_drawable_t drawable,
260 __DRIscreen *dri_screen,
261 bool is_different_gpu,
262 bool multiplanes_available,
263 const __DRIconfig *dri_config,
264 struct loader_dri3_extensions *ext,
265 const struct loader_dri3_vtable *vtable,
266 struct loader_dri3_drawable *draw)
267 {
268 xcb_get_geometry_cookie_t cookie;
269 xcb_get_geometry_reply_t *reply;
270 xcb_generic_error_t *error;
271 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
272 int swap_interval;
273
274 draw->conn = conn;
275 draw->ext = ext;
276 draw->vtable = vtable;
277 draw->drawable = drawable;
278 draw->dri_screen = dri_screen;
279 draw->is_different_gpu = is_different_gpu;
280 draw->multiplanes_available = multiplanes_available;
281
282 draw->have_back = 0;
283 draw->have_fake_front = 0;
284 draw->first_init = true;
285
286 draw->cur_blit_source = -1;
287 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
288 mtx_init(&draw->mtx, mtx_plain);
289 cnd_init(&draw->event_cnd);
290
291 if (draw->ext->config)
292 draw->ext->config->configQueryi(draw->dri_screen,
293 "vblank_mode", &vblank_mode);
294
295 switch (vblank_mode) {
296 case DRI_CONF_VBLANK_NEVER:
297 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
298 swap_interval = 0;
299 break;
300 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
301 case DRI_CONF_VBLANK_ALWAYS_SYNC:
302 default:
303 swap_interval = 1;
304 break;
305 }
306 draw->swap_interval = swap_interval;
307
308 dri3_update_num_back(draw);
309
310 /* Create a new drawable */
311 draw->dri_drawable =
312 draw->ext->image_driver->createNewDrawable(dri_screen,
313 dri_config,
314 draw);
315
316 if (!draw->dri_drawable)
317 return 1;
318
319 cookie = xcb_get_geometry(draw->conn, draw->drawable);
320 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
321 if (reply == NULL || error != NULL) {
322 draw->ext->core->destroyDrawable(draw->dri_drawable);
323 return 1;
324 }
325
326 draw->width = reply->width;
327 draw->height = reply->height;
328 draw->depth = reply->depth;
329 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
330 free(reply);
331
332 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
333 if (draw->ext->core->base.version >= 2) {
334 (void )draw->ext->core->getConfigAttrib(dri_config,
335 __DRI_ATTRIB_SWAP_METHOD,
336 &draw->swap_method);
337 }
338
339 /*
340    * Make sure the server has the same swap interval we do for the new
341 * drawable.
342 */
343 loader_dri3_set_swap_interval(draw, swap_interval);
344
345 return 0;
346 }
347
348 /*
349 * Process one Present event
350 */
351 static void
352 dri3_handle_present_event(struct loader_dri3_drawable *draw,
353 xcb_present_generic_event_t *ge)
354 {
355 switch (ge->evtype) {
356 case XCB_PRESENT_CONFIGURE_NOTIFY: {
357 xcb_present_configure_notify_event_t *ce = (void *) ge;
358
359 draw->width = ce->width;
360 draw->height = ce->height;
361 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
362 draw->ext->flush->invalidate(draw->dri_drawable);
363 break;
364 }
365 case XCB_PRESENT_COMPLETE_NOTIFY: {
366 xcb_present_complete_notify_event_t *ce = (void *) ge;
367
368 /* Compute the processed SBC number from the received 32-bit serial number
369 * merged with the upper 32-bits of the sent 64-bit serial number while
370 * checking for wrap.
371 */
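         /* Worked example with illustrative numbers: if send_sbc is
          * 0x100000001 and the event's 32-bit serial is 0xffffffff, merging
          * yields 0x1ffffffff, which is greater than send_sbc, so we
          * subtract 2^32 and recover the intended recv_sbc of 0xffffffff.
          */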
372 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
373 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
374 if (draw->recv_sbc > draw->send_sbc)
375 draw->recv_sbc -= 0x100000000;
376
377 /* When moving from flip to copy, we assume that we can allocate in
378 * a more optimal way if we don't need to cater for the display
379 * controller.
380 */
381 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_COPY &&
382 draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP) {
383 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
384 if (draw->buffers[b])
385 draw->buffers[b]->reallocate = true;
386 }
387 }
388
389 /* If the server tells us that our allocation is suboptimal, we
390 * reallocate once.
391 */
392 #ifdef HAVE_DRI3_MODIFIERS
393 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_SUBOPTIMAL_COPY &&
394 draw->last_present_mode != ce->mode) {
395 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
396 if (draw->buffers[b])
397 draw->buffers[b]->reallocate = true;
398 }
399 }
400 #endif
401 draw->last_present_mode = ce->mode;
402
403 if (draw->vtable->show_fps)
404 draw->vtable->show_fps(draw, ce->ust);
405
406 draw->ust = ce->ust;
407 draw->msc = ce->msc;
408 } else if (ce->serial == draw->eid) {
409 draw->notify_ust = ce->ust;
410 draw->notify_msc = ce->msc;
411 }
412 break;
413 }
414 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
415 xcb_present_idle_notify_event_t *ie = (void *) ge;
416 int b;
417
418 for (b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
419 struct loader_dri3_buffer *buf = draw->buffers[b];
420
421 if (buf && buf->pixmap == ie->pixmap)
422 buf->busy = 0;
423
424 if (buf && draw->cur_blit_source != b && !buf->busy &&
425 (buf->reallocate ||
426 (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK))) {
427 dri3_free_render_buffer(draw, buf);
428 draw->buffers[b] = NULL;
429 }
430 }
431 break;
432 }
433 }
434 free(ge);
435 }
436
437 static bool
438 dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
439 {
440 xcb_generic_event_t *ev;
441 xcb_present_generic_event_t *ge;
442
443 xcb_flush(draw->conn);
444
445 /* Only have one thread waiting for events at a time */
446 if (draw->has_event_waiter) {
447 cnd_wait(&draw->event_cnd, &draw->mtx);
448 /* Another thread has updated the protected info, so retest. */
449 return true;
450 } else {
451 draw->has_event_waiter = true;
452 /* Allow other threads access to the drawable while we're waiting. */
453 mtx_unlock(&draw->mtx);
454 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
455 mtx_lock(&draw->mtx);
456 draw->has_event_waiter = false;
457 cnd_broadcast(&draw->event_cnd);
458 }
459 if (!ev)
460 return false;
461 ge = (void *) ev;
462 dri3_handle_present_event(draw, ge);
463 return true;
464 }
465
466 /** loader_dri3_wait_for_msc
467 *
468 * Get the X server to send an event when the target msc/divisor/remainder is
469 * reached.
470 */
471 bool
472 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
473 int64_t target_msc,
474 int64_t divisor, int64_t remainder,
475 int64_t *ust, int64_t *msc, int64_t *sbc)
476 {
477 xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
478 draw->drawable,
479 draw->eid,
480 target_msc,
481 divisor,
482 remainder);
483 xcb_generic_event_t *ev;
484 unsigned full_sequence;
485
486 mtx_lock(&draw->mtx);
487 xcb_flush(draw->conn);
488
489 /* Wait for the event */
490 do {
491 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
492 if (!ev) {
493 mtx_unlock(&draw->mtx);
494 return false;
495 }
496
497 full_sequence = ev->full_sequence;
498 dri3_handle_present_event(draw, (void *) ev);
499 } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
500
501 *ust = draw->notify_ust;
502 *msc = draw->notify_msc;
503 *sbc = draw->recv_sbc;
504 mtx_unlock(&draw->mtx);
505
506 return true;
507 }
508
509 /** loader_dri3_wait_for_sbc
510 *
511 * Wait for the completed swap buffer count to reach the specified
512 * target. Presumably the application knows that this will be reached with
513 * outstanding complete events, or we're going to be here awhile.
514 */
515 int
516 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
517 int64_t target_sbc, int64_t *ust,
518 int64_t *msc, int64_t *sbc)
519 {
520 /* From the GLX_OML_sync_control spec:
521 *
522 * "If <target_sbc> = 0, the function will block until all previous
523 * swaps requested with glXSwapBuffersMscOML for that window have
524 * completed."
525 */
526 mtx_lock(&draw->mtx);
527 if (!target_sbc)
528 target_sbc = draw->send_sbc;
529
530 while (draw->recv_sbc < target_sbc) {
531 if (!dri3_wait_for_event_locked(draw)) {
532 mtx_unlock(&draw->mtx);
533 return 0;
534 }
535 }
536
537 *ust = draw->ust;
538 *msc = draw->msc;
539 *sbc = draw->recv_sbc;
540 mtx_unlock(&draw->mtx);
541 return 1;
542 }
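/* Illustrative caller sketch (hypothetical; the real callers live in the GLX
 * and EGL platform code): waiting for every previously queued swap to finish
 * is simply a wait for target_sbc == 0, which is also what
 * loader_dri3_swapbuffer_barrier() does:
 *
 *    int64_t ust, msc, sbc;
 *    if (!loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc))
 *       return;   // special event stream went away (e.g. connection error)
 *
 * On success, ust/msc/sbc describe the most recently completed swap.
 */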
543
544 /** dri3_find_back
545 *
546 * Find an idle back buffer. If there isn't one, then
547 * wait for a present idle notify event from the X server
548 */
549 static int
550 dri3_find_back(struct loader_dri3_drawable *draw)
551 {
552 int b;
553 int num_to_consider;
554
555 mtx_lock(&draw->mtx);
556    /* Increase the likelihood of reusing the current buffer */
557 dri3_flush_present_events(draw);
558
559 /* Check whether we need to reuse the current back buffer as new back.
560 * In that case, wait until it's not busy anymore.
561 */
562 dri3_update_num_back(draw);
563 num_to_consider = draw->num_back;
564 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
565 num_to_consider = 1;
566 draw->cur_blit_source = -1;
567 }
568
569 for (;;) {
570 for (b = 0; b < num_to_consider; b++) {
571 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
572 struct loader_dri3_buffer *buffer = draw->buffers[id];
573
574 if (!buffer || !buffer->busy) {
575 draw->cur_back = id;
576 mtx_unlock(&draw->mtx);
577 return id;
578 }
579 }
580 if (!dri3_wait_for_event_locked(draw)) {
581 mtx_unlock(&draw->mtx);
582 return -1;
583 }
584 }
585 }
586
587 static xcb_gcontext_t
588 dri3_drawable_gc(struct loader_dri3_drawable *draw)
589 {
590 if (!draw->gc) {
591 uint32_t v = 0;
592 xcb_create_gc(draw->conn,
593 (draw->gc = xcb_generate_id(draw->conn)),
594 draw->drawable,
595 XCB_GC_GRAPHICS_EXPOSURES,
596 &v);
597 }
598 return draw->gc;
599 }
600
601
602 static struct loader_dri3_buffer *
603 dri3_back_buffer(struct loader_dri3_drawable *draw)
604 {
605 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
606 }
607
608 static struct loader_dri3_buffer *
609 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
610 {
611 return draw->buffers[LOADER_DRI3_FRONT_ID];
612 }
613
614 static void
615 dri3_copy_area(xcb_connection_t *c,
616 xcb_drawable_t src_drawable,
617 xcb_drawable_t dst_drawable,
618 xcb_gcontext_t gc,
619 int16_t src_x,
620 int16_t src_y,
621 int16_t dst_x,
622 int16_t dst_y,
623 uint16_t width,
624 uint16_t height)
625 {
626 xcb_void_cookie_t cookie;
627
628 cookie = xcb_copy_area_checked(c,
629 src_drawable,
630 dst_drawable,
631 gc,
632 src_x,
633 src_y,
634 dst_x,
635 dst_y,
636 width,
637 height);
638 xcb_discard_reply(c, cookie.sequence);
639 }
640
641 /**
642 * Asks the driver to flush any queued work necessary for serializing with the
643 * X command stream, and optionally the slightly more strict requirement of
644 * glFlush() equivalence (which would require flushing even if nothing had
645 * been drawn to a window system framebuffer, for example).
646 */
647 void
648 loader_dri3_flush(struct loader_dri3_drawable *draw,
649 unsigned flags,
650 enum __DRI2throttleReason throttle_reason)
651 {
652    /* The context may be NULL if no context is current; skip the flush in that case. */
653 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
654
655 if (dri_context) {
656 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
657 flags, throttle_reason);
658 }
659 }
660
661 void
662 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
663 int x, int y,
664 int width, int height,
665 bool flush)
666 {
667 struct loader_dri3_buffer *back;
668 unsigned flags = __DRI2_FLUSH_DRAWABLE;
669
670 /* Check we have the right attachments */
671 if (!draw->have_back || draw->is_pixmap)
672 return;
673
674 if (flush)
675 flags |= __DRI2_FLUSH_CONTEXT;
676 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
677
678 back = dri3_find_back_alloc(draw);
679 if (!back)
680 return;
681
682 y = draw->height - y - height;
683
684 if (draw->is_different_gpu) {
685 /* Update the linear buffer part of the back buffer
686 * for the dri3_copy_area operation
687 */
688 (void) loader_dri3_blit_image(draw,
689 back->linear_buffer,
690 back->image,
691 0, 0, back->width, back->height,
692 0, 0, __BLIT_FLAG_FLUSH);
693 }
694
695 loader_dri3_swapbuffer_barrier(draw);
696 dri3_fence_reset(draw->conn, back);
697 dri3_copy_area(draw->conn,
698 back->pixmap,
699 draw->drawable,
700 dri3_drawable_gc(draw),
701 x, y, x, y, width, height);
702 dri3_fence_trigger(draw->conn, back);
703 /* Refresh the fake front (if present) after we just damaged the real
704 * front.
705 */
706 if (draw->have_fake_front &&
707 !loader_dri3_blit_image(draw,
708 dri3_fake_front_buffer(draw)->image,
709 back->image,
710 x, y, width, height,
711 x, y, __BLIT_FLAG_FLUSH) &&
712 !draw->is_different_gpu) {
713 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
714 dri3_copy_area(draw->conn,
715 back->pixmap,
716 dri3_fake_front_buffer(draw)->pixmap,
717 dri3_drawable_gc(draw),
718 x, y, x, y, width, height);
719 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
720 dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
721 }
722 dri3_fence_await(draw->conn, draw, back);
723 }
724
725 void
726 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
727 xcb_drawable_t dest,
728 xcb_drawable_t src)
729 {
730 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
731
732 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
733 dri3_copy_area(draw->conn,
734 src, dest,
735 dri3_drawable_gc(draw),
736 0, 0, 0, 0, draw->width, draw->height);
737 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
738 dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
739 }
740
741 void
742 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
743 {
744 struct loader_dri3_buffer *front;
745
746 if (draw == NULL || !draw->have_fake_front)
747 return;
748
749 front = dri3_fake_front_buffer(draw);
750
751 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
752
753    /* In the draw->is_different_gpu case, the linear buffer has been updated,
754 * but not yet the tiled buffer.
755 * Copy back to the tiled buffer we use for rendering.
756 * Note that we don't need flushing.
757 */
758 if (draw->is_different_gpu)
759 (void) loader_dri3_blit_image(draw,
760 front->image,
761 front->linear_buffer,
762 0, 0, front->width, front->height,
763 0, 0, 0);
764 }
765
766 void
767 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
768 {
769 struct loader_dri3_buffer *front;
770
771 if (draw == NULL || !draw->have_fake_front)
772 return;
773
774 front = dri3_fake_front_buffer(draw);
775
776    /* In the draw->is_different_gpu case, we update the linear_buffer
777 * before updating the real front.
778 */
779 if (draw->is_different_gpu)
780 (void) loader_dri3_blit_image(draw,
781 front->linear_buffer,
782 front->image,
783 0, 0, front->width, front->height,
784 0, 0, __BLIT_FLAG_FLUSH);
785 loader_dri3_swapbuffer_barrier(draw);
786 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
787 }
788
789 /** dri3_flush_present_events
790 *
791 * Process any present events that have been received from the X server
792 */
793 static void
794 dri3_flush_present_events(struct loader_dri3_drawable *draw)
795 {
796 /* Check to see if any configuration changes have occurred
797 * since we were last invoked
798 */
799 if (draw->has_event_waiter)
800 return;
801
802 if (draw->special_event) {
803 xcb_generic_event_t *ev;
804
805 while ((ev = xcb_poll_for_special_event(draw->conn,
806 draw->special_event)) != NULL) {
807 xcb_present_generic_event_t *ge = (void *) ev;
808 dri3_handle_present_event(draw, ge);
809 }
810 }
811 }
812
813 /** loader_dri3_swap_buffers_msc
814 *
815 * Make the current back buffer visible using the present extension
816 */
817 int64_t
818 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
819 int64_t target_msc, int64_t divisor,
820 int64_t remainder, unsigned flush_flags,
821 bool force_copy)
822 {
823 struct loader_dri3_buffer *back;
824 int64_t ret = 0;
825 uint32_t options = XCB_PRESENT_OPTION_NONE;
826
827 draw->vtable->flush_drawable(draw, flush_flags);
828
829 back = dri3_find_back_alloc(draw);
830
831 mtx_lock(&draw->mtx);
832 if (draw->is_different_gpu && back) {
833 /* Update the linear buffer before presenting the pixmap */
834 (void) loader_dri3_blit_image(draw,
835 back->linear_buffer,
836 back->image,
837 0, 0, back->width, back->height,
838 0, 0, __BLIT_FLAG_FLUSH);
839 }
840
841 /* If we need to preload the new back buffer, remember the source.
842 * The force_copy parameter is used by EGL to attempt to preserve
843 * the back buffer across a call to this function.
844 */
845 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
846 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
847
848 /* Exchange the back and fake front. Even though the server knows about these
849 * buffers, it has no notion of back and fake front.
850 */
851 if (back && draw->have_fake_front) {
852 struct loader_dri3_buffer *tmp;
853
854 tmp = dri3_fake_front_buffer(draw);
855 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
856 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
857
858 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
859 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
860 }
861
862 dri3_flush_present_events(draw);
863
864 if (back && !draw->is_pixmap) {
865 dri3_fence_reset(draw->conn, back);
866
867 /* Compute when we want the frame shown by taking the last known
868 * successful MSC and adding in a swap interval for each outstanding swap
869 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
870 * semantic"
871 */
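      /* Worked example with illustrative numbers for the computation below:
       * with swap_interval 1, msc 100, recv_sbc 3 and send_sbc 5 once it has
       * been incremented just below, two swaps are still outstanding, so the
       * default glXSwapBuffers() path requests
       * target_msc = 100 + 1 * (5 - 3) = 102.
       */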
872 ++draw->send_sbc;
873 if (target_msc == 0 && divisor == 0 && remainder == 0)
874 target_msc = draw->msc + draw->swap_interval *
875 (draw->send_sbc - draw->recv_sbc);
876 else if (divisor == 0 && remainder > 0) {
877 /* From the GLX_OML_sync_control spec:
878 * "If <divisor> = 0, the swap will occur when MSC becomes
879 * greater than or equal to <target_msc>."
880 *
881 * Note that there's no mention of the remainder. The Present
882 * extension throws BadValue for remainder != 0 with divisor == 0, so
883 * just drop the passed in value.
884 */
885 remainder = 0;
886 }
887
888 /* From the GLX_EXT_swap_control spec
889 * and the EGL 1.4 spec (page 53):
890 *
891 * "If <interval> is set to a value of 0, buffer swaps are not
892 * synchronized to a video frame."
893 *
894 * Implementation note: It is possible to enable triple buffering
895 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
896 * the default.
897 */
898 if (draw->swap_interval == 0)
899 options |= XCB_PRESENT_OPTION_ASYNC;
900
901 /* If we need to populate the new back, but need to reuse the back
902 * buffer slot due to lack of local blit capabilities, make sure
903        * the server doesn't flip; otherwise we would deadlock.
904 */
905 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
906 options |= XCB_PRESENT_OPTION_COPY;
907 #ifdef HAVE_DRI3_MODIFIERS
908 if (draw->multiplanes_available)
909 options |= XCB_PRESENT_OPTION_SUBOPTIMAL;
910 #endif
911 back->busy = 1;
912 back->last_swap = draw->send_sbc;
913 xcb_present_pixmap(draw->conn,
914 draw->drawable,
915 back->pixmap,
916 (uint32_t) draw->send_sbc,
917 0, /* valid */
918 0, /* update */
919 0, /* x_off */
920 0, /* y_off */
921 None, /* target_crtc */
922 None,
923 back->sync_fence,
924 options,
925 target_msc,
926 divisor,
927 remainder, 0, NULL);
928 ret = (int64_t) draw->send_sbc;
929
930 /* Schedule a server-side back-preserving blit if necessary.
931 * This happens iff all conditions below are satisfied:
932 * a) We have a fake front,
933 * b) We need to preserve the back buffer,
934 * c) We don't have local blit capabilities.
935 */
936 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
937 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
938 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
939 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
940
941 dri3_fence_reset(draw->conn, new_back);
942 dri3_copy_area(draw->conn, src->pixmap,
943 new_back->pixmap,
944 dri3_drawable_gc(draw),
945 0, 0, 0, 0, draw->width, draw->height);
946 dri3_fence_trigger(draw->conn, new_back);
947 new_back->last_swap = src->last_swap;
948 }
949
950 xcb_flush(draw->conn);
951 if (draw->stamp)
952 ++(*draw->stamp);
953 }
954 mtx_unlock(&draw->mtx);
955
956 draw->ext->flush->invalidate(draw->dri_drawable);
957
958 return ret;
959 }
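/* Illustrative caller sketch (hypothetical; the real GLX/EGL callers live
 * outside this file): a plain SwapBuffers with no OML target is expressed as
 * target_msc = divisor = remainder = 0 and no forced copy:
 *
 *    int64_t sbc = loader_dri3_swap_buffers_msc(draw, 0, 0, 0,
 *                                               __DRI2_FLUSH_DRAWABLE |
 *                                               __DRI2_FLUSH_CONTEXT,
 *                                               false);
 *
 * The return value is the swap buffer count of the queued present, or 0 if
 * nothing was presented (no back buffer, or the drawable is a pixmap).
 */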
960
961 int
962 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
963 {
964 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
965 int ret;
966
967 mtx_lock(&draw->mtx);
968 ret = (!back || back->last_swap == 0) ? 0 :
969 draw->send_sbc - back->last_swap + 1;
970 mtx_unlock(&draw->mtx);
971
972 return ret;
973 }
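/* Worked example with illustrative numbers for the age computation above:
 * if the chosen back buffer was last presented at sbc 7 (last_swap == 7) and
 * swaps have since been queued up to send_sbc == 9, its contents are
 * 9 - 7 + 1 = 3 frames old, matching GLX/EGL_EXT_buffer_age semantics;
 * a buffer that has never been presented reports an age of 0.
 */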
974
975 /** loader_dri3_open
976 *
977 * Wrapper around xcb_dri3_open
978 */
979 int
980 loader_dri3_open(xcb_connection_t *conn,
981 xcb_window_t root,
982 uint32_t provider)
983 {
984 xcb_dri3_open_cookie_t cookie;
985 xcb_dri3_open_reply_t *reply;
986 int fd;
987
988 cookie = xcb_dri3_open(conn,
989 root,
990 provider);
991
992 reply = xcb_dri3_open_reply(conn, cookie, NULL);
993 if (!reply)
994 return -1;
995
996 if (reply->nfd != 1) {
997 free(reply);
998 return -1;
999 }
1000
1001 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
1002 free(reply);
1003 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
1004
1005 return fd;
1006 }
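/* Illustrative caller sketch (hypothetical; the real callers sit in the
 * GLX/EGL platform code): the returned fd is a DRM file descriptor for the
 * screen's device, suitable for driver screen creation:
 *
 *    xcb_screen_t *screen = xcb_setup_roots_iterator(xcb_get_setup(conn)).data;
 *    int fd = loader_dri3_open(conn, screen->root, None);
 *    if (fd < 0)
 *       return;   // DRI3 open failed
 *
 * Passing None (0) as the provider typically lets the X server choose its
 * default provider.
 */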
1007
1008 static uint32_t
1009 dri3_cpp_for_format(uint32_t format) {
1010 switch (format) {
1011 case __DRI_IMAGE_FORMAT_R8:
1012 return 1;
1013 case __DRI_IMAGE_FORMAT_RGB565:
1014 case __DRI_IMAGE_FORMAT_GR88:
1015 return 2;
1016 case __DRI_IMAGE_FORMAT_XRGB8888:
1017 case __DRI_IMAGE_FORMAT_ARGB8888:
1018 case __DRI_IMAGE_FORMAT_ABGR8888:
1019 case __DRI_IMAGE_FORMAT_XBGR8888:
1020 case __DRI_IMAGE_FORMAT_XRGB2101010:
1021 case __DRI_IMAGE_FORMAT_ARGB2101010:
1022 case __DRI_IMAGE_FORMAT_XBGR2101010:
1023 case __DRI_IMAGE_FORMAT_ABGR2101010:
1024 case __DRI_IMAGE_FORMAT_SARGB8:
1025 case __DRI_IMAGE_FORMAT_SABGR8:
1026 return 4;
1027 case __DRI_IMAGE_FORMAT_NONE:
1028 default:
1029 return 0;
1030 }
1031 }
1032
1033 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1034 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1035 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1036 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1037 */
1038 static int
1039 image_format_to_fourcc(int format)
1040 {
1041
1042 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1043 switch (format) {
1044 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1045 case __DRI_IMAGE_FORMAT_SABGR8: return __DRI_IMAGE_FOURCC_SABGR8888;
1046 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1047 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1048 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1049 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1050 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1051 case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
1052 case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
1053 case __DRI_IMAGE_FORMAT_XBGR2101010: return __DRI_IMAGE_FOURCC_XBGR2101010;
1054 case __DRI_IMAGE_FORMAT_ABGR2101010: return __DRI_IMAGE_FOURCC_ABGR2101010;
1055 }
1056 return 0;
1057 }
1058
1059 #ifdef HAVE_DRI3_MODIFIERS
1060 static bool
1061 has_supported_modifier(struct loader_dri3_drawable *draw, unsigned int format,
1062 uint64_t *modifiers, uint32_t count)
1063 {
1064 uint64_t *supported_modifiers;
1065 int32_t supported_modifiers_count;
1066 bool found = false;
1067 int i, j;
1068
1069 if (!draw->ext->image->queryDmaBufModifiers(draw->dri_screen,
1070 format, 0, NULL, NULL,
1071 &supported_modifiers_count) ||
1072 supported_modifiers_count == 0)
1073 return false;
1074
1075 supported_modifiers = malloc(supported_modifiers_count * sizeof(uint64_t));
1076 if (!supported_modifiers)
1077 return false;
1078
1079 draw->ext->image->queryDmaBufModifiers(draw->dri_screen, format,
1080 supported_modifiers_count,
1081 supported_modifiers, NULL,
1082 &supported_modifiers_count);
1083
1084 for (i = 0; !found && i < supported_modifiers_count; i++) {
1085 for (j = 0; !found && j < count; j++) {
1086 if (supported_modifiers[i] == modifiers[j])
1087 found = true;
1088 }
1089 }
1090
1091 free(supported_modifiers);
1092 return found;
1093 }
1094 #endif
1095
1096 /** dri3_alloc_render_buffer
1097 *
1098 * Use the driver createImage function to construct a __DRIimage, then
1099  * export a file descriptor for it and create an X pixmap from that fd
1100 *
1101 * Allocate an xshmfence for synchronization
1102 */
1103 static struct loader_dri3_buffer *
1104 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
1105 int width, int height, int depth)
1106 {
1107 struct loader_dri3_buffer *buffer;
1108 __DRIimage *pixmap_buffer;
1109 xcb_pixmap_t pixmap;
1110 xcb_sync_fence_t sync_fence;
1111 struct xshmfence *shm_fence;
1112 int buffer_fds[4], fence_fd;
1113 int num_planes = 0;
1114 int i, mod;
1115 int ret;
1116
1117 /* Create an xshmfence object and
1118 * prepare to send that to the X server
1119 */
1120
1121 fence_fd = xshmfence_alloc_shm();
1122 if (fence_fd < 0)
1123 return NULL;
1124
1125 shm_fence = xshmfence_map_shm(fence_fd);
1126 if (shm_fence == NULL)
1127 goto no_shm_fence;
1128
1129 /* Allocate the image from the driver
1130 */
1131 buffer = calloc(1, sizeof *buffer);
1132 if (!buffer)
1133 goto no_buffer;
1134
1135 buffer->cpp = dri3_cpp_for_format(format);
1136 if (!buffer->cpp)
1137 goto no_image;
1138
1139 if (!draw->is_different_gpu) {
1140 #ifdef HAVE_DRI3_MODIFIERS
1141 if (draw->multiplanes_available &&
1142 draw->ext->image->base.version >= 15 &&
1143 draw->ext->image->queryDmaBufModifiers &&
1144 draw->ext->image->createImageWithModifiers) {
1145 xcb_dri3_get_supported_modifiers_cookie_t mod_cookie;
1146 xcb_dri3_get_supported_modifiers_reply_t *mod_reply;
1147 xcb_generic_error_t *error = NULL;
1148 uint64_t *modifiers = NULL;
1149 uint32_t count = 0;
1150
1151 mod_cookie = xcb_dri3_get_supported_modifiers(draw->conn,
1152 draw->drawable,
1153 depth, buffer->cpp * 8);
1154 mod_reply = xcb_dri3_get_supported_modifiers_reply(draw->conn,
1155 mod_cookie,
1156 &error);
1157 if (!mod_reply)
1158 goto no_image;
1159
1160 if (mod_reply->num_window_modifiers) {
1161 count = mod_reply->num_window_modifiers;
1162 modifiers = malloc(count * sizeof(uint64_t));
1163 if (!modifiers) {
1164 free(mod_reply);
1165 goto no_image;
1166 }
1167
1168 memcpy(modifiers,
1169 xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply),
1170 count * sizeof(uint64_t));
1171
1172 if (!has_supported_modifier(draw, image_format_to_fourcc(format),
1173 modifiers, count)) {
1174 free(modifiers);
1175 count = 0;
1176 modifiers = NULL;
1177 }
1178 }
1179
1180 if (mod_reply->num_screen_modifiers && modifiers == NULL) {
1181 count = mod_reply->num_screen_modifiers;
1182 modifiers = malloc(count * sizeof(uint64_t));
1183 if (!modifiers) {
1184 free(modifiers);
1185 free(mod_reply);
1186 goto no_image;
1187 }
1188
1189 memcpy(modifiers,
1190 xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply),
1191 count * sizeof(uint64_t));
1192 }
1193
1194 free(mod_reply);
1195
1196 buffer->image = draw->ext->image->createImageWithModifiers(draw->dri_screen,
1197 width, height,
1198 format,
1199 modifiers,
1200 count,
1201 buffer);
1202 free(modifiers);
1203 }
1204 #endif
1205 if (!buffer->image)
1206 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1207 width, height,
1208 format,
1209 __DRI_IMAGE_USE_SHARE |
1210 __DRI_IMAGE_USE_SCANOUT |
1211 __DRI_IMAGE_USE_BACKBUFFER,
1212 buffer);
1213
1214 pixmap_buffer = buffer->image;
1215
1216 if (!buffer->image)
1217 goto no_image;
1218 } else {
1219 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1220 width, height,
1221 format,
1222 0,
1223 buffer);
1224
1225 if (!buffer->image)
1226 goto no_image;
1227
1228 buffer->linear_buffer =
1229 draw->ext->image->createImage(draw->dri_screen,
1230 width, height, format,
1231 __DRI_IMAGE_USE_SHARE |
1232 __DRI_IMAGE_USE_LINEAR |
1233 __DRI_IMAGE_USE_BACKBUFFER,
1234 buffer);
1235 pixmap_buffer = buffer->linear_buffer;
1236
1237 if (!buffer->linear_buffer)
1238 goto no_linear_buffer;
1239 }
1240
1241    /* X wants some information about the planes, so ask the image for it
1242 */
1243 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_NUM_PLANES,
1244 &num_planes))
1245 num_planes = 1;
1246
1247 for (i = 0; i < num_planes; i++) {
1248 __DRIimage *image = draw->ext->image->fromPlanar(pixmap_buffer, i, NULL);
1249
1250 if (!image) {
1251 assert(i == 0);
1252 image = pixmap_buffer;
1253 }
1254
1255 ret = draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_FD,
1256 &buffer_fds[i]);
1257 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_STRIDE,
1258 &buffer->strides[i]);
1259 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_OFFSET,
1260 &buffer->offsets[i]);
1261 if (image != pixmap_buffer)
1262 draw->ext->image->destroyImage(image);
1263
1264 if (!ret)
1265 goto no_buffer_attrib;
1266 }
1267
1268 ret = draw->ext->image->queryImage(pixmap_buffer,
1269 __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod);
1270 buffer->modifier = (uint64_t) mod << 32;
1271 ret &= draw->ext->image->queryImage(pixmap_buffer,
1272 __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod);
1273 buffer->modifier |= (uint64_t)(mod & 0xffffffff);
1274
1275 if (!ret)
1276 buffer->modifier = DRM_FORMAT_MOD_INVALID;
1277
1278 pixmap = xcb_generate_id(draw->conn);
1279 #ifdef HAVE_DRI3_MODIFIERS
1280 if (draw->multiplanes_available &&
1281 buffer->modifier != DRM_FORMAT_MOD_INVALID) {
1282 xcb_dri3_pixmap_from_buffers(draw->conn,
1283 pixmap,
1284 draw->drawable,
1285 num_planes,
1286 width, height,
1287 buffer->strides[0], buffer->offsets[0],
1288 buffer->strides[1], buffer->offsets[1],
1289 buffer->strides[2], buffer->offsets[2],
1290 buffer->strides[3], buffer->offsets[3],
1291 depth, buffer->cpp * 8,
1292 buffer->modifier,
1293 buffer_fds);
1294 } else
1295 #endif
1296 {
1297 xcb_dri3_pixmap_from_buffer(draw->conn,
1298 pixmap,
1299 draw->drawable,
1300 buffer->size,
1301 width, height, buffer->strides[0],
1302 depth, buffer->cpp * 8,
1303 buffer_fds[0]);
1304 }
1305
1306 xcb_dri3_fence_from_fd(draw->conn,
1307 pixmap,
1308 (sync_fence = xcb_generate_id(draw->conn)),
1309 false,
1310 fence_fd);
1311
1312 buffer->pixmap = pixmap;
1313 buffer->own_pixmap = true;
1314 buffer->sync_fence = sync_fence;
1315 buffer->shm_fence = shm_fence;
1316 buffer->width = width;
1317 buffer->height = height;
1318
1319 /* Mark the buffer as idle
1320 */
1321 dri3_fence_set(buffer);
1322
1323 return buffer;
1324
1325 no_buffer_attrib:
1326 do {
1327 close(buffer_fds[i]);
1328 } while (--i >= 0);
1329 draw->ext->image->destroyImage(pixmap_buffer);
1330 no_linear_buffer:
1331 if (draw->is_different_gpu)
1332 draw->ext->image->destroyImage(buffer->image);
1333 no_image:
1334 free(buffer);
1335 no_buffer:
1336 xshmfence_unmap_shm(shm_fence);
1337 no_shm_fence:
1338 close(fence_fd);
1339 return NULL;
1340 }
1341
1342 /** dri3_update_drawable
1343 *
1344 * Called the first time we use the drawable and then
1345 * after we receive present configure notify events to
1346 * track the geometry of the drawable
1347 */
1348 static int
1349 dri3_update_drawable(__DRIdrawable *driDrawable,
1350 struct loader_dri3_drawable *draw)
1351 {
1352 mtx_lock(&draw->mtx);
1353 if (draw->first_init) {
1354 xcb_get_geometry_cookie_t geom_cookie;
1355 xcb_get_geometry_reply_t *geom_reply;
1356 xcb_void_cookie_t cookie;
1357 xcb_generic_error_t *error;
1358 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1359 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1360
1361 draw->first_init = false;
1362
1363 /* Try to select for input on the window.
1364 *
1365 * If the drawable is a window, this will get our events
1366 * delivered.
1367 *
1368 * Otherwise, we'll get a BadWindow error back from this request which
1369 * will let us know that the drawable is a pixmap instead.
1370 */
1371
1372 draw->eid = xcb_generate_id(draw->conn);
1373 cookie =
1374 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1375 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1376 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1377 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1378
1379 present_capabilities_cookie =
1380 xcb_present_query_capabilities(draw->conn, draw->drawable);
1381
1382 /* Create an XCB event queue to hold present events outside of the usual
1383 * application event queue
1384 */
1385 draw->special_event = xcb_register_for_special_xge(draw->conn,
1386 &xcb_present_id,
1387 draw->eid,
1388 draw->stamp);
1389 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1390
1391 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1392
1393 if (!geom_reply) {
1394 mtx_unlock(&draw->mtx);
1395 return false;
1396 }
1397
1398 draw->width = geom_reply->width;
1399 draw->height = geom_reply->height;
1400 draw->depth = geom_reply->depth;
1401 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1402
1403 free(geom_reply);
1404
1405 draw->is_pixmap = false;
1406
1407 /* Check to see if our select input call failed. If it failed with a
1408 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1409 * special event queue created above and mark the drawable as a pixmap
1410 */
1411
1412 error = xcb_request_check(draw->conn, cookie);
1413
1414 present_capabilities_reply =
1415 xcb_present_query_capabilities_reply(draw->conn,
1416 present_capabilities_cookie,
1417 NULL);
1418
1419 if (present_capabilities_reply) {
1420 draw->present_capabilities = present_capabilities_reply->capabilities;
1421 free(present_capabilities_reply);
1422 } else
1423 draw->present_capabilities = 0;
1424
1425 if (error) {
1426 if (error->error_code != BadWindow) {
1427 free(error);
1428 mtx_unlock(&draw->mtx);
1429 return false;
1430 }
1431 draw->is_pixmap = true;
1432 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1433 draw->special_event = NULL;
1434 }
1435 }
1436 dri3_flush_present_events(draw);
1437 mtx_unlock(&draw->mtx);
1438 return true;
1439 }
1440
1441 __DRIimage *
1442 loader_dri3_create_image(xcb_connection_t *c,
1443 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1444 unsigned int format,
1445 __DRIscreen *dri_screen,
1446 const __DRIimageExtension *image,
1447 void *loaderPrivate)
1448 {
1449 int *fds;
1450 __DRIimage *image_planar, *ret;
1451 int stride, offset;
1452
1453 /* Get an FD for the pixmap object
1454 */
1455 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1456
1457 stride = bp_reply->stride;
1458 offset = 0;
1459
1460 /* createImageFromFds creates a wrapper __DRIimage structure which
1461  * can deal with multiple planes for things like YUV images. So, once
1462 * we've gotten the planar wrapper, pull the single plane out of it and
1463 * discard the wrapper.
1464 */
1465 image_planar = image->createImageFromFds(dri_screen,
1466 bp_reply->width,
1467 bp_reply->height,
1468 image_format_to_fourcc(format),
1469 fds, 1,
1470 &stride, &offset, loaderPrivate);
1471 close(fds[0]);
1472 if (!image_planar)
1473 return NULL;
1474
1475 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1476
1477 if (!ret)
1478 ret = image_planar;
1479 else
1480 image->destroyImage(image_planar);
1481
1482 return ret;
1483 }
1484
1485 #ifdef HAVE_DRI3_MODIFIERS
1486 __DRIimage *
1487 loader_dri3_create_image_from_buffers(xcb_connection_t *c,
1488 xcb_dri3_buffers_from_pixmap_reply_t *bp_reply,
1489 unsigned int format,
1490 __DRIscreen *dri_screen,
1491 const __DRIimageExtension *image,
1492 void *loaderPrivate)
1493 {
1494 __DRIimage *ret;
1495 int *fds;
1496 uint32_t *strides_in, *offsets_in;
1497 int strides[4], offsets[4];
1498 unsigned error;
1499 int i;
1500
1501 if (bp_reply->nfd > 4)
1502 return NULL;
1503
1504 fds = xcb_dri3_buffers_from_pixmap_reply_fds(c, bp_reply);
1505 strides_in = xcb_dri3_buffers_from_pixmap_strides(bp_reply);
1506 offsets_in = xcb_dri3_buffers_from_pixmap_offsets(bp_reply);
1507 for (i = 0; i < bp_reply->nfd; i++) {
1508 strides[i] = strides_in[i];
1509 offsets[i] = offsets_in[i];
1510 }
1511
1512 ret = image->createImageFromDmaBufs2(dri_screen,
1513 bp_reply->width,
1514 bp_reply->height,
1515 image_format_to_fourcc(format),
1516 bp_reply->modifier,
1517 fds, bp_reply->nfd,
1518 strides, offsets,
1519 0, 0, 0, 0, /* UNDEFINED */
1520 &error, loaderPrivate);
1521
1522 for (i = 0; i < bp_reply->nfd; i++)
1523 close(fds[i]);
1524
1525 return ret;
1526 }
1527 #endif
1528
1529 /** dri3_get_pixmap_buffer
1530 *
1531 * Get the DRM object for a pixmap from the X server and
1532 * wrap that with a __DRIimage structure using createImageFromFds
1533 */
1534 static struct loader_dri3_buffer *
1535 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1536 enum loader_dri3_buffer_type buffer_type,
1537 struct loader_dri3_drawable *draw)
1538 {
1539 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1540 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1541 xcb_drawable_t pixmap;
1542 xcb_sync_fence_t sync_fence;
1543 struct xshmfence *shm_fence;
1544 int width;
1545 int height;
1546 int fence_fd;
1547 __DRIscreen *cur_screen;
1548
1549 if (buffer)
1550 return buffer;
1551
1552 pixmap = draw->drawable;
1553
1554 buffer = calloc(1, sizeof *buffer);
1555 if (!buffer)
1556 goto no_buffer;
1557
1558 fence_fd = xshmfence_alloc_shm();
1559 if (fence_fd < 0)
1560 goto no_fence;
1561 shm_fence = xshmfence_map_shm(fence_fd);
1562 if (shm_fence == NULL) {
1563 close (fence_fd);
1564 goto no_fence;
1565 }
1566
1567 /* Get the currently-bound screen or revert to using the drawable's screen if
1568 * no contexts are currently bound. The latter case is at least necessary for
1569 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1570 */
1571 cur_screen = draw->vtable->get_dri_screen();
1572 if (!cur_screen) {
1573 cur_screen = draw->dri_screen;
1574 }
1575
1576 xcb_dri3_fence_from_fd(draw->conn,
1577 pixmap,
1578 (sync_fence = xcb_generate_id(draw->conn)),
1579 false,
1580 fence_fd);
1581 #ifdef HAVE_DRI3_MODIFIERS
1582 if (draw->multiplanes_available &&
1583 draw->ext->image->base.version >= 15 &&
1584 draw->ext->image->createImageFromDmaBufs2) {
1585 xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie;
1586 xcb_dri3_buffers_from_pixmap_reply_t *bps_reply;
1587
1588 bps_cookie = xcb_dri3_buffers_from_pixmap(draw->conn, pixmap);
1589 bps_reply = xcb_dri3_buffers_from_pixmap_reply(draw->conn, bps_cookie,
1590 NULL);
1591 if (!bps_reply)
1592 goto no_image;
1593 buffer->image =
1594 loader_dri3_create_image_from_buffers(draw->conn, bps_reply, format,
1595 cur_screen, draw->ext->image,
1596 buffer);
1597 width = bps_reply->width;
1598 height = bps_reply->height;
1599 free(bps_reply);
1600 } else
1601 #endif
1602 {
1603 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1604 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1605
1606 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1607 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1608 if (!bp_reply)
1609 goto no_image;
1610
1611 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1612 cur_screen, draw->ext->image,
1613 buffer);
1614 width = bp_reply->width;
1615 height = bp_reply->height;
1616 free(bp_reply);
1617 }
1618
1619 if (!buffer->image)
1620 goto no_image;
1621
1622 buffer->pixmap = pixmap;
1623 buffer->own_pixmap = false;
1624 buffer->width = width;
1625 buffer->height = height;
1626 buffer->shm_fence = shm_fence;
1627 buffer->sync_fence = sync_fence;
1628
1629 draw->buffers[buf_id] = buffer;
1630
1631 return buffer;
1632
1633 no_image:
1634 xcb_sync_destroy_fence(draw->conn, sync_fence);
1635 xshmfence_unmap_shm(shm_fence);
1636 no_fence:
1637 free(buffer);
1638 no_buffer:
1639 return NULL;
1640 }
1641
1642 /** dri3_get_buffer
1643 *
1644 * Find a front or back buffer, allocating new ones as necessary
1645 */
1646 static struct loader_dri3_buffer *
1647 dri3_get_buffer(__DRIdrawable *driDrawable,
1648 unsigned int format,
1649 enum loader_dri3_buffer_type buffer_type,
1650 struct loader_dri3_drawable *draw)
1651 {
1652 struct loader_dri3_buffer *buffer;
1653 int buf_id;
1654
1655 if (buffer_type == loader_dri3_buffer_back) {
1656 draw->back_format = format;
1657
1658 buf_id = dri3_find_back(draw);
1659
1660 if (buf_id < 0)
1661 return NULL;
1662 } else {
1663 buf_id = LOADER_DRI3_FRONT_ID;
1664 }
1665
1666 buffer = draw->buffers[buf_id];
1667
1668 /* Allocate a new buffer if there isn't an old one, if that
1669 * old one is the wrong size, or if it's suboptimal
1670 */
1671 if (!buffer || buffer->width != draw->width ||
1672 buffer->height != draw->height ||
1673 buffer->reallocate) {
1674 struct loader_dri3_buffer *new_buffer;
1675
1676 /* Allocate the new buffers
1677 */
1678 new_buffer = dri3_alloc_render_buffer(draw,
1679 format,
1680 draw->width,
1681 draw->height,
1682 draw->depth);
1683 if (!new_buffer)
1684 return NULL;
1685
1686 /* When resizing, copy the contents of the old buffer, waiting for that
1687 * copy to complete using our fences before proceeding
1688 */
1689 if ((buffer_type == loader_dri3_buffer_back ||
1690 (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
1691 && buffer) {
1692
1693 /* Fill the new buffer with data from an old buffer */
1694 dri3_fence_await(draw->conn, draw, buffer);
1695 if (!loader_dri3_blit_image(draw,
1696 new_buffer->image,
1697 buffer->image,
1698 0, 0, draw->width, draw->height,
1699 0, 0, 0) &&
1700 !buffer->linear_buffer) {
1701 dri3_fence_reset(draw->conn, new_buffer);
1702 dri3_copy_area(draw->conn,
1703 buffer->pixmap,
1704 new_buffer->pixmap,
1705 dri3_drawable_gc(draw),
1706 0, 0, 0, 0,
1707 draw->width, draw->height);
1708 dri3_fence_trigger(draw->conn, new_buffer);
1709 }
1710 dri3_free_render_buffer(draw, buffer);
1711 } else if (buffer_type == loader_dri3_buffer_front) {
1712 /* Fill the new fake front with data from a real front */
1713 loader_dri3_swapbuffer_barrier(draw);
1714 dri3_fence_reset(draw->conn, new_buffer);
1715 dri3_copy_area(draw->conn,
1716 draw->drawable,
1717 new_buffer->pixmap,
1718 dri3_drawable_gc(draw),
1719 0, 0, 0, 0,
1720 draw->width, draw->height);
1721 dri3_fence_trigger(draw->conn, new_buffer);
1722
1723 if (new_buffer->linear_buffer) {
1724 dri3_fence_await(draw->conn, draw, new_buffer);
1725 (void) loader_dri3_blit_image(draw,
1726 new_buffer->image,
1727 new_buffer->linear_buffer,
1728 0, 0, draw->width, draw->height,
1729 0, 0, 0);
1730 }
1731 }
1732 buffer = new_buffer;
1733 draw->buffers[buf_id] = buffer;
1734 }
1735 dri3_fence_await(draw->conn, draw, buffer);
1736
1737 /*
1738 * Do we need to preserve the content of a previous buffer?
1739 *
1740 * Note that this blit is needed only to avoid a wait for a buffer that
1741 * is currently in the flip chain or being scanned out from. That's really
1742 * a tradeoff. If we're ok with the wait we can reduce the number of back
1743 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1744 * but in the latter case we must disallow page-flipping.
1745 */
1746 if (buffer_type == loader_dri3_buffer_back &&
1747 draw->cur_blit_source != -1 &&
1748 draw->buffers[draw->cur_blit_source] &&
1749 buffer != draw->buffers[draw->cur_blit_source]) {
1750
1751 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1752
1753       /* Avoid flushing here. This will probably help tiling hardware. */
1754 (void) loader_dri3_blit_image(draw,
1755 buffer->image,
1756 source->image,
1757 0, 0, draw->width, draw->height,
1758 0, 0, 0);
1759 buffer->last_swap = source->last_swap;
1760 draw->cur_blit_source = -1;
1761 }
1762 /* Return the requested buffer */
1763 return buffer;
1764 }
1765
1766 /** dri3_free_buffers
1767 *
1768  * Free the front buffer or all of the back buffers. Used
1769 * when the application changes which buffers it needs
1770 */
1771 static void
1772 dri3_free_buffers(__DRIdrawable *driDrawable,
1773 enum loader_dri3_buffer_type buffer_type,
1774 struct loader_dri3_drawable *draw)
1775 {
1776 struct loader_dri3_buffer *buffer;
1777 int first_id;
1778 int n_id;
1779 int buf_id;
1780
1781 switch (buffer_type) {
1782 case loader_dri3_buffer_back:
1783 first_id = LOADER_DRI3_BACK_ID(0);
1784 n_id = LOADER_DRI3_MAX_BACK;
1785 draw->cur_blit_source = -1;
1786 break;
1787 case loader_dri3_buffer_front:
1788 first_id = LOADER_DRI3_FRONT_ID;
1789 /* Don't free a fake front holding new backbuffer content. */
1790 n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
1791 }
1792
1793 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1794 buffer = draw->buffers[buf_id];
1795 if (buffer) {
1796 dri3_free_render_buffer(draw, buffer);
1797 draw->buffers[buf_id] = NULL;
1798 }
1799 }
1800 }
1801
1802 /** loader_dri3_get_buffers
1803 *
1804 * The published buffer allocation API.
1805 * Returns all of the necessary buffers, allocating
1806 * as needed.
1807 */
1808 int
1809 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1810 unsigned int format,
1811 uint32_t *stamp,
1812 void *loaderPrivate,
1813 uint32_t buffer_mask,
1814 struct __DRIimageList *buffers)
1815 {
1816 struct loader_dri3_drawable *draw = loaderPrivate;
1817 struct loader_dri3_buffer *front, *back;
1818
1819 buffers->image_mask = 0;
1820 buffers->front = NULL;
1821 buffers->back = NULL;
1822
1823 front = NULL;
1824 back = NULL;
1825
1826 if (!dri3_update_drawable(driDrawable, draw))
1827 return false;
1828
1829 /* pixmaps always have front buffers.
1830 * Exchange swaps also mandate fake front buffers.
1831 */
1832 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1833 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1834
1835 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1836       /* All pixmaps are owned by the server GPU.
1837        * When we use a different GPU, we can't use the pixmap
1838        * as a buffer since it is potentially tiled in a way
1839 * our device can't understand. In this case, use
1840 * a fake front buffer. Hopefully the pixmap
1841 * content will get synced with the fake front
1842 * buffer.
1843 */
1844 if (draw->is_pixmap && !draw->is_different_gpu)
1845 front = dri3_get_pixmap_buffer(driDrawable,
1846 format,
1847 loader_dri3_buffer_front,
1848 draw);
1849 else
1850 front = dri3_get_buffer(driDrawable,
1851 format,
1852 loader_dri3_buffer_front,
1853 draw);
1854
1855 if (!front)
1856 return false;
1857 } else {
1858 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1859 draw->have_fake_front = 0;
1860 }
1861
1862 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1863 back = dri3_get_buffer(driDrawable,
1864 format,
1865 loader_dri3_buffer_back,
1866 draw);
1867 if (!back)
1868 return false;
1869 draw->have_back = 1;
1870 } else {
1871 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1872 draw->have_back = 0;
1873 }
1874
1875 if (front) {
1876 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1877 buffers->front = front->image;
1878 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1879 }
1880
1881 if (back) {
1882 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1883 buffers->back = back->image;
1884 }
1885
1886 draw->stamp = stamp;
1887
1888 return true;
1889 }
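/* Illustrative wiring sketch (hypothetical names for the table and the front
 * flush callback; the real tables live in the GLX and EGL platform code):
 * loader_dri3_get_buffers is intended to be plugged into the driver-facing
 * __DRIimageLoaderExtension as its getBuffers hook:
 *
 *    static const __DRIimageLoaderExtension image_loader_ext = {
 *       .base             = { __DRI_IMAGE_LOADER, 1 },
 *       .getBuffers       = loader_dri3_get_buffers,
 *       .flushFrontBuffer = my_flush_front_buffer,
 *    };
 *
 * The driver then calls back into loader_dri3_get_buffers whenever it needs
 * fresh front/back __DRIimages for a drawable.
 */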
1890
1891 /** loader_dri3_update_drawable_geometry
1892 *
1893 * Get the current drawable geometry.
1894 */
1895 void
1896 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1897 {
1898 xcb_get_geometry_cookie_t geom_cookie;
1899 xcb_get_geometry_reply_t *geom_reply;
1900
1901 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1902
1903 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1904
1905 if (geom_reply) {
1906 draw->width = geom_reply->width;
1907 draw->height = geom_reply->height;
1908 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1909 draw->ext->flush->invalidate(draw->dri_drawable);
1910
1911 free(geom_reply);
1912 }
1913 }
1914
1915
1916 /**
1917 * Make sure the server has flushed all pending swap buffers to hardware
1918 * for this drawable. Ideally we'd want to send an X protocol request to
1919 * have the server block our connection until the swaps are complete. That
1920 * would avoid the potential round-trip here.
1921 */
1922 void
1923 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1924 {
1925 int64_t ust, msc, sbc;
1926
1927 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1928 }
1929
1930 /**
1931 * Perform any cleanup associated with a close screen operation.
1932 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1933 *
1934 * This function destroys the screen's cached swap context if any.
1935 */
1936 void
1937 loader_dri3_close_screen(__DRIscreen *dri_screen)
1938 {
1939 mtx_lock(&blit_context.mtx);
1940 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1941 blit_context.core->destroyContext(blit_context.ctx);
1942 blit_context.ctx = NULL;
1943 }
1944 mtx_unlock(&blit_context.mtx);
1945 }
1946
1947 /**
1948 * Find a backbuffer slot - potentially allocating a back buffer
1949 *
1950 * \param draw[in,out] Pointer to the drawable for which to find back.
1951 * \return Pointer to a new back buffer or NULL if allocation failed or was
1952 * not mandated.
1953 *
1954 * Find a potentially new back buffer, and if it's not been allocated yet and
1955 * in addition needs initializing, then try to allocate and initialize it.
1956 */
1958 static struct loader_dri3_buffer *
1959 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1960 {
1961 struct loader_dri3_buffer *back;
1962 int id;
1963
1964 id = dri3_find_back(draw);
1965 if (id < 0)
1966 return NULL;
1967
1968 back = draw->buffers[id];
1969 /* Allocate a new back if we haven't got one */
1970 if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
1971 dri3_update_drawable(draw->dri_drawable, draw))
1972 back = dri3_alloc_render_buffer(draw, draw->back_format,
1973 draw->width, draw->height, draw->depth);
1974
1975 if (!back)
1976 return NULL;
1977
1978 draw->buffers[id] = back;
1979
1980 /* If necessary, prefill the back with data according to swap_method mode. */
1981 if (draw->cur_blit_source != -1 &&
1982 draw->buffers[draw->cur_blit_source] &&
1983 back != draw->buffers[draw->cur_blit_source]) {
1984 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1985
1986 dri3_fence_await(draw->conn, draw, source);
1987 dri3_fence_await(draw->conn, draw, back);
1988 (void) loader_dri3_blit_image(draw,
1989 back->image,
1990 source->image,
1991 0, 0, draw->width, draw->height,
1992 0, 0, 0);
1993 back->last_swap = source->last_swap;
1994 draw->cur_blit_source = -1;
1995 }
1996
1997 return back;
1998 }