egl/x11: Re-allocate buffers if format is suboptimal
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27 #include <string.h>
28
29 #include <X11/xshmfence.h>
30 #include <xcb/xcb.h>
31 #include <xcb/dri3.h>
32 #include <xcb/present.h>
33
34 #include <X11/Xlib-xcb.h>
35
36 #include "loader_dri3_helper.h"
37 #include "util/macros.h"
38 #include "drm_fourcc.h"
39
40 /* From xmlpool/options.h, user exposed so should be stable */
41 #define DRI_CONF_VBLANK_NEVER 0
42 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
43 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
44 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
45
46 /**
47 * A cached blit context.
48 */
49 struct loader_dri3_blit_context {
50 mtx_t mtx;
51 __DRIcontext *ctx;
52 __DRIscreen *cur_screen;
53 const __DRIcoreExtension *core;
54 };
55
56 /* For simplicity we maintain the cache only for a single screen at a time */
57 static struct loader_dri3_blit_context blit_context = {
58 _MTX_INITIALIZER_NP, NULL
59 };
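
/* The cache holds at most one context at a time, guarded by blit_context.mtx,
 * and is recreated lazily when a drawable from a different screen asks for it.
 * Sketch of the intended usage pattern (this is what loader_dri3_blit_image()
 * below falls back to when no context is current, not a separate code path):
 *
 *    __DRIcontext *ctx = loader_dri3_blit_context_get(draw);
 *    if (ctx)
 *       draw->ext->image->blitImage(ctx, dst, src, ...);
 *    loader_dri3_blit_context_put();   // required even if ctx is NULL
 */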
60
61 static void
62 dri3_flush_present_events(struct loader_dri3_drawable *draw);
63
64 static struct loader_dri3_buffer *
65 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
66
67 /**
68 * Do we have blit functionality in the image blit extension?
69 *
70 * \param draw[in] The drawable intended to blit from / to.
71 * \return true if we have blit functionality. false otherwise.
72 */
73 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
74 {
75 return draw->ext->image->base.version >= 9 &&
76 draw->ext->image->blitImage != NULL;
77 }
78
79 /**
80 * Get and lock (for use with the current thread) a dri context associated
81 * with the drawable's dri screen. The context is intended to be used with
82 * the dri image extension's blitImage method.
83 *
84 * \param draw[in] Pointer to the drawable whose dri screen we want a
85 * dri context for.
86 * \return A dri context or NULL if context creation failed.
87 *
88 * When the caller is done with the context (even if the context returned was
89 * NULL), the caller must call loader_dri3_blit_context_put.
90 */
91 static __DRIcontext *
92 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
93 {
94 mtx_lock(&blit_context.mtx);
95
96 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
97 blit_context.core->destroyContext(blit_context.ctx);
98 blit_context.ctx = NULL;
99 }
100
101 if (!blit_context.ctx) {
102 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
103 NULL, NULL, NULL);
104 blit_context.cur_screen = draw->dri_screen;
105 blit_context.core = draw->ext->core;
106 }
107
108 return blit_context.ctx;
109 }
110
111 /**
112 * Release (for use with other threads) a dri context previously obtained using
113 * loader_dri3_blit_context_get.
114 */
115 static void
116 loader_dri3_blit_context_put(void)
117 {
118 mtx_unlock(&blit_context.mtx);
119 }
120
121 /**
122 * Blit (parts of) the contents of a DRI image to another dri image
123 *
124 * \param draw[in] The drawable which owns the images.
125 * \param dst[in] The destination image.
126 * \param src[in] The source image.
127 * \param dstx0[in] Start destination coordinate.
128 * \param dsty0[in] Start destination coordinate.
129 * \param width[in] Blit width.
130 * \param height[in] Blit height.
131 * \param srcx0[in] Start source coordinate.
132 * \param srcy0[in] Start source coordinate.
133 * \param flush_flag[in] Image blit flush flag.
134 * \return true iff successful.
135 */
136 static bool
137 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
138 __DRIimage *dst, __DRIimage *src,
139 int dstx0, int dsty0, int width, int height,
140 int srcx0, int srcy0, int flush_flag)
141 {
142 __DRIcontext *dri_context;
143 bool use_blit_context = false;
144
145 if (!loader_dri3_have_image_blit(draw))
146 return false;
147
148 dri_context = draw->vtable->get_dri_context(draw);
149
150 if (!dri_context || !draw->vtable->in_current_context(draw)) {
151 dri_context = loader_dri3_blit_context_get(draw);
152 use_blit_context = true;
153 flush_flag |= __BLIT_FLAG_FLUSH;
154 }
155
156 if (dri_context)
157 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
158 width, height, srcx0, srcy0,
159 width, height, flush_flag);
160
161 if (use_blit_context)
162 loader_dri3_blit_context_put();
163
164 return dri_context != NULL;
165 }
166
167 static inline void
168 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
169 {
170 xshmfence_reset(buffer->shm_fence);
171 }
172
173 static inline void
174 dri3_fence_set(struct loader_dri3_buffer *buffer)
175 {
176 xshmfence_trigger(buffer->shm_fence);
177 }
178
179 static inline void
180 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
181 {
182 xcb_sync_trigger_fence(c, buffer->sync_fence);
183 }
184
185 static inline void
186 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
187 struct loader_dri3_buffer *buffer)
188 {
189 xcb_flush(c);
190 xshmfence_await(buffer->shm_fence);
191 if (draw) {
192 mtx_lock(&draw->mtx);
193 dri3_flush_present_events(draw);
194 mtx_unlock(&draw->mtx);
195 }
196 }
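
/* Both fences wrap the same underlying object: dri3_alloc_render_buffer()
 * creates the server-side sync fence and the local xshmfence mapping from one
 * fd, so either side can signal or wait. The copy paths below use the pattern
 * (illustrative sketch only):
 *
 *    dri3_fence_reset(draw->conn, buffer);        // mark not yet signalled
 *    dri3_copy_area(draw->conn, ...);             // queue the server work
 *    dri3_fence_trigger(draw->conn, buffer);      // server signals afterwards
 *    dri3_fence_await(draw->conn, draw, buffer);  // block on the xshmfence
 *
 * The present path instead hands buffer->sync_fence to xcb_present_pixmap(),
 * which signals it once the buffer goes idle.
 */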
197
198 static void
199 dri3_update_num_back(struct loader_dri3_drawable *draw)
200 {
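/* With flips the display controller presumably keeps one buffer busy for
 * scanout, so a third back buffer is used to keep rendering from stalling;
 * two are enough when the server copies.
 */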
201 if (draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP)
202 draw->num_back = 3;
203 else
204 draw->num_back = 2;
205 }
206
207 void
208 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
209 {
210 draw->swap_interval = interval;
211 }
212
213 /** dri3_free_render_buffer
214 *
215 * Free everything associated with one render buffer, including the pixmap,
216 * fence objects and the driver image
217 */
218 static void
219 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
220 struct loader_dri3_buffer *buffer)
221 {
222 if (buffer->own_pixmap)
223 xcb_free_pixmap(draw->conn, buffer->pixmap);
224 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
225 xshmfence_unmap_shm(buffer->shm_fence);
226 draw->ext->image->destroyImage(buffer->image);
227 if (buffer->linear_buffer)
228 draw->ext->image->destroyImage(buffer->linear_buffer);
229 free(buffer);
230 }
231
232 void
233 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
234 {
235 int i;
236
237 draw->ext->core->destroyDrawable(draw->dri_drawable);
238
239 for (i = 0; i < ARRAY_SIZE(draw->buffers); i++) {
240 if (draw->buffers[i])
241 dri3_free_render_buffer(draw, draw->buffers[i]);
242 }
243
244 if (draw->special_event) {
245 xcb_void_cookie_t cookie =
246 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
247 XCB_PRESENT_EVENT_MASK_NO_EVENT);
248
249 xcb_discard_reply(draw->conn, cookie.sequence);
250 xcb_unregister_for_special_event(draw->conn, draw->special_event);
251 }
252
253 cnd_destroy(&draw->event_cnd);
254 mtx_destroy(&draw->mtx);
255 }
256
257 int
258 loader_dri3_drawable_init(xcb_connection_t *conn,
259 xcb_drawable_t drawable,
260 __DRIscreen *dri_screen,
261 bool is_different_gpu,
262 bool multiplanes_available,
263 const __DRIconfig *dri_config,
264 struct loader_dri3_extensions *ext,
265 const struct loader_dri3_vtable *vtable,
266 struct loader_dri3_drawable *draw)
267 {
268 xcb_get_geometry_cookie_t cookie;
269 xcb_get_geometry_reply_t *reply;
270 xcb_generic_error_t *error;
271 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
272 int swap_interval;
273
274 draw->conn = conn;
275 draw->ext = ext;
276 draw->vtable = vtable;
277 draw->drawable = drawable;
278 draw->dri_screen = dri_screen;
279 draw->is_different_gpu = is_different_gpu;
280 draw->multiplanes_available = multiplanes_available;
281
282 draw->have_back = 0;
283 draw->have_fake_front = 0;
284 draw->first_init = true;
285
286 draw->cur_blit_source = -1;
287 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
288 mtx_init(&draw->mtx, mtx_plain);
289 cnd_init(&draw->event_cnd);
290
291 if (draw->ext->config)
292 draw->ext->config->configQueryi(draw->dri_screen,
293 "vblank_mode", &vblank_mode);
294
295 switch (vblank_mode) {
296 case DRI_CONF_VBLANK_NEVER:
297 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
298 swap_interval = 0;
299 break;
300 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
301 case DRI_CONF_VBLANK_ALWAYS_SYNC:
302 default:
303 swap_interval = 1;
304 break;
305 }
306 draw->swap_interval = swap_interval;
307
308 dri3_update_num_back(draw);
309
310 /* Create a new drawable */
311 draw->dri_drawable =
312 draw->ext->image_driver->createNewDrawable(dri_screen,
313 dri_config,
314 draw);
315
316 if (!draw->dri_drawable)
317 return 1;
318
319 cookie = xcb_get_geometry(draw->conn, draw->drawable);
320 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
321 if (reply == NULL || error != NULL) {
322 draw->ext->core->destroyDrawable(draw->dri_drawable);
323 return 1;
324 }
325
326 draw->width = reply->width;
327 draw->height = reply->height;
328 draw->depth = reply->depth;
329 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
330 free(reply);
331
332 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
333 if (draw->ext->core->base.version >= 2) {
334 (void) draw->ext->core->getConfigAttrib(dri_config,
335 __DRI_ATTRIB_SWAP_METHOD,
336 &draw->swap_method);
337 }
338
339 /*
340 * Make sure server has the same swap interval we do for the new
341 * drawable.
342 */
343 loader_dri3_set_swap_interval(draw, swap_interval);
344
345 return 0;
346 }
347
348 /*
349 * Process one Present event
350 */
351 static void
352 dri3_handle_present_event(struct loader_dri3_drawable *draw,
353 xcb_present_generic_event_t *ge)
354 {
355 switch (ge->evtype) {
356 case XCB_PRESENT_CONFIGURE_NOTIFY: {
357 xcb_present_configure_notify_event_t *ce = (void *) ge;
358
359 draw->width = ce->width;
360 draw->height = ce->height;
361 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
362 draw->ext->flush->invalidate(draw->dri_drawable);
363 break;
364 }
365 case XCB_PRESENT_COMPLETE_NOTIFY: {
366 xcb_present_complete_notify_event_t *ce = (void *) ge;
367
368 /* Compute the processed SBC number from the received 32-bit serial number
369 * merged with the upper 32-bits of the sent 64-bit serial number while
370 * checking for wrap.
371 */
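/* Worked example with made-up numbers: if send_sbc is 0x100000002 and a
 * late completion arrives with serial 0xfffffffe, the merge gives
 * 0x1fffffffe, which exceeds send_sbc, so 0x100000000 is subtracted and
 * recv_sbc becomes 0xfffffffe.
 */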
372 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
373 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
374 if (draw->recv_sbc > draw->send_sbc)
375 draw->recv_sbc -= 0x100000000;
376
377 /* When moving from flip to copy, we assume that we can allocate in
378 * a more optimal way if we don't need to cater for the display
379 * controller.
380 */
381 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_COPY &&
382 draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP) {
383 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
384 if (draw->buffers[b])
385 draw->buffers[b]->reallocate = true;
386 }
387 }
388
389 /* If the server tells us that our allocation is suboptimal, we
390 * reallocate once.
391 */
392 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_SUBOPTIMAL_COPY &&
393 draw->last_present_mode != ce->mode) {
394 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
395 if (draw->buffers[b])
396 draw->buffers[b]->reallocate = true;
397 }
398 }
399
400 draw->last_present_mode = ce->mode;
401
402 if (draw->vtable->show_fps)
403 draw->vtable->show_fps(draw, ce->ust);
404
405 draw->ust = ce->ust;
406 draw->msc = ce->msc;
407 } else if (ce->serial == draw->eid) {
408 draw->notify_ust = ce->ust;
409 draw->notify_msc = ce->msc;
410 }
411 break;
412 }
413 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
414 xcb_present_idle_notify_event_t *ie = (void *) ge;
415 int b;
416
417 for (b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
418 struct loader_dri3_buffer *buf = draw->buffers[b];
419
420 if (buf && buf->pixmap == ie->pixmap)
421 buf->busy = 0;
422
423 if (buf && draw->cur_blit_source != b && !buf->busy &&
424 (buf->reallocate ||
425 (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK))) {
426 dri3_free_render_buffer(draw, buf);
427 draw->buffers[b] = NULL;
428 }
429 }
430 break;
431 }
432 }
433 free(ge);
434 }
435
436 static bool
437 dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
438 {
439 xcb_generic_event_t *ev;
440 xcb_present_generic_event_t *ge;
441
442 xcb_flush(draw->conn);
443
444 /* Only have one thread waiting for events at a time */
445 if (draw->has_event_waiter) {
446 cnd_wait(&draw->event_cnd, &draw->mtx);
447 /* Another thread has updated the protected info, so retest. */
448 return true;
449 } else {
450 draw->has_event_waiter = true;
451 /* Allow other threads access to the drawable while we're waiting. */
452 mtx_unlock(&draw->mtx);
453 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
454 mtx_lock(&draw->mtx);
455 draw->has_event_waiter = false;
456 cnd_broadcast(&draw->event_cnd);
457 }
458 if (!ev)
459 return false;
460 ge = (void *) ev;
461 dri3_handle_present_event(draw, ge);
462 return true;
463 }
464
465 /** loader_dri3_wait_for_msc
466 *
467 * Get the X server to send an event when the target msc/divisor/remainder is
468 * reached.
469 */
470 bool
471 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
472 int64_t target_msc,
473 int64_t divisor, int64_t remainder,
474 int64_t *ust, int64_t *msc, int64_t *sbc)
475 {
476 xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
477 draw->drawable,
478 draw->eid,
479 target_msc,
480 divisor,
481 remainder);
482 xcb_generic_event_t *ev;
483 unsigned full_sequence;
484
485 mtx_lock(&draw->mtx);
486 xcb_flush(draw->conn);
487
488 /* Wait for the event */
489 do {
490 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
491 if (!ev) {
492 mtx_unlock(&draw->mtx);
493 return false;
494 }
495
496 full_sequence = ev->full_sequence;
497 dri3_handle_present_event(draw, (void *) ev);
498 } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
499
500 *ust = draw->notify_ust;
501 *msc = draw->notify_msc;
502 *sbc = draw->recv_sbc;
503 mtx_unlock(&draw->mtx);
504
505 return true;
506 }
507
508 /** loader_dri3_wait_for_sbc
509 *
510 * Wait for the completed swap buffer count to reach the specified
511 * target. Presumably the application knows that this will be reached with
512 * outstanding complete events, or we're going to be here awhile.
513 */
514 int
515 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
516 int64_t target_sbc, int64_t *ust,
517 int64_t *msc, int64_t *sbc)
518 {
519 /* From the GLX_OML_sync_control spec:
520 *
521 * "If <target_sbc> = 0, the function will block until all previous
522 * swaps requested with glXSwapBuffersMscOML for that window have
523 * completed."
524 */
525 mtx_lock(&draw->mtx);
526 if (!target_sbc)
527 target_sbc = draw->send_sbc;
528
529 while (draw->recv_sbc < target_sbc) {
530 if (!dri3_wait_for_event_locked(draw)) {
531 mtx_unlock(&draw->mtx);
532 return 0;
533 }
534 }
535
536 *ust = draw->ust;
537 *msc = draw->msc;
538 *sbc = draw->recv_sbc;
539 mtx_unlock(&draw->mtx);
540 return 1;
541 }
542
543 /** dri3_find_back
544 *
545 * Find an idle back buffer. If there isn't one, then
546 * wait for a present idle notify event from the X server
547 */
548 static int
549 dri3_find_back(struct loader_dri3_drawable *draw)
550 {
551 int b;
552 int num_to_consider;
553
554 mtx_lock(&draw->mtx);
555 /* Increase the likelihood of reusing the current buffer */
556 dri3_flush_present_events(draw);
557
558 /* Check whether we need to reuse the current back buffer as the new back.
559 * In that case, wait until it's not busy anymore.
560 */
561 dri3_update_num_back(draw);
562 num_to_consider = draw->num_back;
563 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
564 num_to_consider = 1;
565 draw->cur_blit_source = -1;
566 }
567
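/* The scan starts at draw->cur_back and wraps around the ring: with
 * cur_back = 1 and num_back = 3 (illustrative numbers), the expression
 * (b + draw->cur_back) % draw->num_back visits 1, 2, 0.
 */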
568 for (;;) {
569 for (b = 0; b < num_to_consider; b++) {
570 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
571 struct loader_dri3_buffer *buffer = draw->buffers[id];
572
573 if (!buffer || !buffer->busy) {
574 draw->cur_back = id;
575 mtx_unlock(&draw->mtx);
576 return id;
577 }
578 }
579 if (!dri3_wait_for_event_locked(draw)) {
580 mtx_unlock(&draw->mtx);
581 return -1;
582 }
583 }
584 }
585
586 static xcb_gcontext_t
587 dri3_drawable_gc(struct loader_dri3_drawable *draw)
588 {
589 if (!draw->gc) {
590 uint32_t v = 0;
591 xcb_create_gc(draw->conn,
592 (draw->gc = xcb_generate_id(draw->conn)),
593 draw->drawable,
594 XCB_GC_GRAPHICS_EXPOSURES,
595 &v);
596 }
597 return draw->gc;
598 }
599
600
601 static struct loader_dri3_buffer *
602 dri3_back_buffer(struct loader_dri3_drawable *draw)
603 {
604 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
605 }
606
607 static struct loader_dri3_buffer *
608 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
609 {
610 return draw->buffers[LOADER_DRI3_FRONT_ID];
611 }
612
613 static void
614 dri3_copy_area(xcb_connection_t *c,
615 xcb_drawable_t src_drawable,
616 xcb_drawable_t dst_drawable,
617 xcb_gcontext_t gc,
618 int16_t src_x,
619 int16_t src_y,
620 int16_t dst_x,
621 int16_t dst_y,
622 uint16_t width,
623 uint16_t height)
624 {
625 xcb_void_cookie_t cookie;
626
627 cookie = xcb_copy_area_checked(c,
628 src_drawable,
629 dst_drawable,
630 gc,
631 src_x,
632 src_y,
633 dst_x,
634 dst_y,
635 width,
636 height);
637 xcb_discard_reply(c, cookie.sequence);
638 }
639
640 /**
641 * Asks the driver to flush any queued work necessary for serializing with the
642 * X command stream, and optionally the slightly more strict requirement of
643 * glFlush() equivalence (which would require flushing even if nothing had
644 * been drawn to a window system framebuffer, for example).
645 */
646 void
647 loader_dri3_flush(struct loader_dri3_drawable *draw,
648 unsigned flags,
649 enum __DRI2throttleReason throttle_reason)
650 {
651 /* The context may be NULL if no context is current; only flush if we have one. */
652 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
653
654 if (dri_context) {
655 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
656 flags, throttle_reason);
657 }
658 }
659
660 void
661 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
662 int x, int y,
663 int width, int height,
664 bool flush)
665 {
666 struct loader_dri3_buffer *back;
667 unsigned flags = __DRI2_FLUSH_DRAWABLE;
668
669 /* Check we have the right attachments */
670 if (!draw->have_back || draw->is_pixmap)
671 return;
672
673 if (flush)
674 flags |= __DRI2_FLUSH_CONTEXT;
675 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
676
677 back = dri3_find_back_alloc(draw);
678 if (!back)
679 return;
680
681 y = draw->height - y - height;
682
683 if (draw->is_different_gpu) {
684 /* Update the linear buffer part of the back buffer
685 * for the dri3_copy_area operation
686 */
687 (void) loader_dri3_blit_image(draw,
688 back->linear_buffer,
689 back->image,
690 0, 0, back->width, back->height,
691 0, 0, __BLIT_FLAG_FLUSH);
692 }
693
694 loader_dri3_swapbuffer_barrier(draw);
695 dri3_fence_reset(draw->conn, back);
696 dri3_copy_area(draw->conn,
697 back->pixmap,
698 draw->drawable,
699 dri3_drawable_gc(draw),
700 x, y, x, y, width, height);
701 dri3_fence_trigger(draw->conn, back);
702 /* Refresh the fake front (if present) after we just damaged the real
703 * front.
704 */
705 if (draw->have_fake_front &&
706 !loader_dri3_blit_image(draw,
707 dri3_fake_front_buffer(draw)->image,
708 back->image,
709 x, y, width, height,
710 x, y, __BLIT_FLAG_FLUSH) &&
711 !draw->is_different_gpu) {
712 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
713 dri3_copy_area(draw->conn,
714 back->pixmap,
715 dri3_fake_front_buffer(draw)->pixmap,
716 dri3_drawable_gc(draw),
717 x, y, x, y, width, height);
718 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
719 dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
720 }
721 dri3_fence_await(draw->conn, draw, back);
722 }
723
724 void
725 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
726 xcb_drawable_t dest,
727 xcb_drawable_t src)
728 {
729 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
730
731 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
732 dri3_copy_area(draw->conn,
733 src, dest,
734 dri3_drawable_gc(draw),
735 0, 0, 0, 0, draw->width, draw->height);
736 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
737 dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
738 }
739
740 void
741 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
742 {
743 struct loader_dri3_buffer *front;
744
745 if (draw == NULL || !draw->have_fake_front)
746 return;
747
748 front = dri3_fake_front_buffer(draw);
749
750 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
751
752 /* In the draw->is_different_gpu case, the linear buffer has been updated,
753 * but not yet the tiled buffer.
754 * Copy back to the tiled buffer we use for rendering.
755 * Note that we don't need flushing.
756 */
757 if (draw->is_different_gpu)
758 (void) loader_dri3_blit_image(draw,
759 front->image,
760 front->linear_buffer,
761 0, 0, front->width, front->height,
762 0, 0, 0);
763 }
764
765 void
766 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
767 {
768 struct loader_dri3_buffer *front;
769
770 if (draw == NULL || !draw->have_fake_front)
771 return;
772
773 front = dri3_fake_front_buffer(draw);
774
775 /* In the draw->is_different_gpu case, we update the linear_buffer
776 * before updating the real front.
777 */
778 if (draw->is_different_gpu)
779 (void) loader_dri3_blit_image(draw,
780 front->linear_buffer,
781 front->image,
782 0, 0, front->width, front->height,
783 0, 0, __BLIT_FLAG_FLUSH);
784 loader_dri3_swapbuffer_barrier(draw);
785 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
786 }
787
788 /** dri3_flush_present_events
789 *
790 * Process any present events that have been received from the X server
791 */
792 static void
793 dri3_flush_present_events(struct loader_dri3_drawable *draw)
794 {
795 /* Check to see if any configuration changes have occurred
796 * since we were last invoked
797 */
798 if (draw->has_event_waiter)
799 return;
800
801 if (draw->special_event) {
802 xcb_generic_event_t *ev;
803
804 while ((ev = xcb_poll_for_special_event(draw->conn,
805 draw->special_event)) != NULL) {
806 xcb_present_generic_event_t *ge = (void *) ev;
807 dri3_handle_present_event(draw, ge);
808 }
809 }
810 }
811
812 /** loader_dri3_swap_buffers_msc
813 *
814 * Make the current back buffer visible using the present extension
815 */
816 int64_t
817 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
818 int64_t target_msc, int64_t divisor,
819 int64_t remainder, unsigned flush_flags,
820 bool force_copy)
821 {
822 struct loader_dri3_buffer *back;
823 int64_t ret = 0;
824 uint32_t options = XCB_PRESENT_OPTION_NONE;
825
826 draw->vtable->flush_drawable(draw, flush_flags);
827
828 back = dri3_find_back_alloc(draw);
829
830 mtx_lock(&draw->mtx);
831 if (draw->is_different_gpu && back) {
832 /* Update the linear buffer before presenting the pixmap */
833 (void) loader_dri3_blit_image(draw,
834 back->linear_buffer,
835 back->image,
836 0, 0, back->width, back->height,
837 0, 0, __BLIT_FLAG_FLUSH);
838 }
839
840 /* If we need to preload the new back buffer, remember the source.
841 * The force_copy parameter is used by EGL to attempt to preserve
842 * the back buffer across a call to this function.
843 */
844 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
845 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
846
847 /* Exchange the back and fake front. Even though the server knows about these
848 * buffers, it has no notion of back and fake front.
849 */
850 if (back && draw->have_fake_front) {
851 struct loader_dri3_buffer *tmp;
852
853 tmp = dri3_fake_front_buffer(draw);
854 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
855 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
856
857 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
858 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
859 }
860
861 dri3_flush_present_events(draw);
862
863 if (back && !draw->is_pixmap) {
864 dri3_fence_reset(draw->conn, back);
865
866 /* Compute when we want the frame shown by taking the last known
867 * successful MSC and adding in a swap interval for each outstanding swap
868 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
869 * semantic"
870 */
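/* Illustrative numbers: with msc = 100, swap_interval = 1 and two swaps
 * still outstanding after the increment below (send_sbc - recv_sbc = 2),
 * target_msc becomes 100 + 1 * 2 = 102.
 */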
871 ++draw->send_sbc;
872 if (target_msc == 0 && divisor == 0 && remainder == 0)
873 target_msc = draw->msc + draw->swap_interval *
874 (draw->send_sbc - draw->recv_sbc);
875 else if (divisor == 0 && remainder > 0) {
876 /* From the GLX_OML_sync_control spec:
877 * "If <divisor> = 0, the swap will occur when MSC becomes
878 * greater than or equal to <target_msc>."
879 *
880 * Note that there's no mention of the remainder. The Present
881 * extension throws BadValue for remainder != 0 with divisor == 0, so
882 * just drop the passed in value.
883 */
884 remainder = 0;
885 }
886
887 /* From the GLX_EXT_swap_control spec
888 * and the EGL 1.4 spec (page 53):
889 *
890 * "If <interval> is set to a value of 0, buffer swaps are not
891 * synchronized to a video frame."
892 *
893 * Implementation note: It is possible to enable triple buffering
894 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
895 * the default.
896 */
897 if (draw->swap_interval == 0)
898 options |= XCB_PRESENT_OPTION_ASYNC;
899
900 /* If we need to populate the new back, but need to reuse the back
901 * buffer slot due to lack of local blit capabilities, make sure
902 * the server doesn't flip and we deadlock.
903 */
904 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
905 options |= XCB_PRESENT_OPTION_COPY;
906
907 if (draw->multiplanes_available)
908 options |= XCB_PRESENT_OPTION_SUBOPTIMAL;
909
910 back->busy = 1;
911 back->last_swap = draw->send_sbc;
912 xcb_present_pixmap(draw->conn,
913 draw->drawable,
914 back->pixmap,
915 (uint32_t) draw->send_sbc,
916 0, /* valid */
917 0, /* update */
918 0, /* x_off */
919 0, /* y_off */
920 None, /* target_crtc */
921 None,
922 back->sync_fence,
923 options,
924 target_msc,
925 divisor,
926 remainder, 0, NULL);
927 ret = (int64_t) draw->send_sbc;
928
929 /* Schedule a server-side back-preserving blit if necessary.
930 * This happens iff all conditions below are satisfied:
931 * a) We have a fake front,
932 * b) We need to preserve the back buffer,
933 * c) We don't have local blit capabilities.
934 */
935 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
936 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
937 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
938 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
939
940 dri3_fence_reset(draw->conn, new_back);
941 dri3_copy_area(draw->conn, src->pixmap,
942 new_back->pixmap,
943 dri3_drawable_gc(draw),
944 0, 0, 0, 0, draw->width, draw->height);
945 dri3_fence_trigger(draw->conn, new_back);
946 new_back->last_swap = src->last_swap;
947 }
948
949 xcb_flush(draw->conn);
950 if (draw->stamp)
951 ++(*draw->stamp);
952 }
953 mtx_unlock(&draw->mtx);
954
955 draw->ext->flush->invalidate(draw->dri_drawable);
956
957 return ret;
958 }
959
960 int
961 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
962 {
963 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
964 int ret;
965
966 mtx_lock(&draw->mtx);
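/* The age is computed as send_sbc - last_swap + 1, with 0 meaning the
 * buffer contents are undefined; e.g. with back->last_swap = 10 and
 * draw->send_sbc = 12 (made-up values) the reported age is 3.
 */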
967 ret = (!back || back->last_swap == 0) ? 0 :
968 draw->send_sbc - back->last_swap + 1;
969 mtx_unlock(&draw->mtx);
970
971 return ret;
972 }
973
974 /** loader_dri3_open
975 *
976 * Wrapper around xcb_dri3_open
977 */
978 int
979 loader_dri3_open(xcb_connection_t *conn,
980 xcb_window_t root,
981 uint32_t provider)
982 {
983 xcb_dri3_open_cookie_t cookie;
984 xcb_dri3_open_reply_t *reply;
985 int fd;
986
987 cookie = xcb_dri3_open(conn,
988 root,
989 provider);
990
991 reply = xcb_dri3_open_reply(conn, cookie, NULL);
992 if (!reply)
993 return -1;
994
995 if (reply->nfd != 1) {
996 free(reply);
997 return -1;
998 }
999
1000 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
1001 free(reply);
1002 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
1003
1004 return fd;
1005 }
1006
1007 static uint32_t
1008 dri3_cpp_for_format(uint32_t format) {
1009 switch (format) {
1010 case __DRI_IMAGE_FORMAT_R8:
1011 return 1;
1012 case __DRI_IMAGE_FORMAT_RGB565:
1013 case __DRI_IMAGE_FORMAT_GR88:
1014 return 2;
1015 case __DRI_IMAGE_FORMAT_XRGB8888:
1016 case __DRI_IMAGE_FORMAT_ARGB8888:
1017 case __DRI_IMAGE_FORMAT_ABGR8888:
1018 case __DRI_IMAGE_FORMAT_XBGR8888:
1019 case __DRI_IMAGE_FORMAT_XRGB2101010:
1020 case __DRI_IMAGE_FORMAT_ARGB2101010:
1021 case __DRI_IMAGE_FORMAT_XBGR2101010:
1022 case __DRI_IMAGE_FORMAT_ABGR2101010:
1023 case __DRI_IMAGE_FORMAT_SARGB8:
1024 return 4;
1025 case __DRI_IMAGE_FORMAT_NONE:
1026 default:
1027 return 0;
1028 }
1029 }
1030
1031 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1032 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1033 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1034 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1035 */
1036 static int
1037 image_format_to_fourcc(int format)
1038 {
1039
1040 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1041 switch (format) {
1042 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1043 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1044 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1045 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1046 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1047 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1048 case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
1049 case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
1050 case __DRI_IMAGE_FORMAT_XBGR2101010: return __DRI_IMAGE_FOURCC_XBGR2101010;
1051 case __DRI_IMAGE_FORMAT_ABGR2101010: return __DRI_IMAGE_FOURCC_ABGR2101010;
1052 }
1053 return 0;
1054 }
1055
1056 static bool
1057 has_supported_modifier(struct loader_dri3_drawable *draw, unsigned int format,
1058 uint64_t *modifiers, uint32_t count)
1059 {
1060 uint64_t *supported_modifiers;
1061 int32_t supported_modifiers_count;
1062 bool found = false;
1063 int i, j;
1064
1065 if (!draw->ext->image->queryDmaBufModifiers(draw->dri_screen,
1066 format, 0, NULL, NULL,
1067 &supported_modifiers_count) ||
1068 supported_modifiers_count == 0)
1069 return false;
1070
1071 supported_modifiers = malloc(supported_modifiers_count * sizeof(uint64_t));
1072 if (!supported_modifiers)
1073 return false;
1074
1075 draw->ext->image->queryDmaBufModifiers(draw->dri_screen, format,
1076 supported_modifiers_count,
1077 supported_modifiers, NULL,
1078 &supported_modifiers_count);
1079
1080 for (i = 0; !found && i < supported_modifiers_count; i++) {
1081 for (j = 0; !found && j < count; j++) {
1082 if (supported_modifiers[i] == modifiers[j])
1083 found = true;
1084 }
1085 }
1086
1087 free(supported_modifiers);
1088 return found;
1089 }
1090
1091 /** dri3_alloc_render_buffer
1092 *
1093 * Use the driver createImage function to construct a __DRIimage, then
1094 * get a file descriptor for that and create an X pixmap from that
1095 *
1096 * Allocate an xshmfence for synchronization
1097 */
1098 static struct loader_dri3_buffer *
1099 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
1100 int width, int height, int depth)
1101 {
1102 struct loader_dri3_buffer *buffer;
1103 __DRIimage *pixmap_buffer;
1104 xcb_pixmap_t pixmap;
1105 xcb_sync_fence_t sync_fence;
1106 struct xshmfence *shm_fence;
1107 int buffer_fds[4], fence_fd;
1108 int num_planes = 0;
1109 int i, mod;
1110 int ret;
1111
1112 /* Create an xshmfence object and
1113 * prepare to send that to the X server
1114 */
1115
1116 fence_fd = xshmfence_alloc_shm();
1117 if (fence_fd < 0)
1118 return NULL;
1119
1120 shm_fence = xshmfence_map_shm(fence_fd);
1121 if (shm_fence == NULL)
1122 goto no_shm_fence;
1123
1124 /* Allocate the image from the driver
1125 */
1126 buffer = calloc(1, sizeof *buffer);
1127 if (!buffer)
1128 goto no_buffer;
1129
1130 buffer->cpp = dri3_cpp_for_format(format);
1131 if (!buffer->cpp)
1132 goto no_image;
1133
1134 if (!draw->is_different_gpu) {
1135 if (draw->multiplanes_available &&
1136 draw->ext->image->base.version >= 15 &&
1137 draw->ext->image->queryDmaBufModifiers &&
1138 draw->ext->image->createImageWithModifiers) {
1139 xcb_dri3_get_supported_modifiers_cookie_t mod_cookie;
1140 xcb_dri3_get_supported_modifiers_reply_t *mod_reply;
1141 xcb_generic_error_t *error = NULL;
1142 uint64_t *modifiers = NULL;
1143 uint32_t count = 0;
1144
1145 mod_cookie = xcb_dri3_get_supported_modifiers(draw->conn,
1146 draw->drawable,
1147 depth, buffer->cpp * 8);
1148 mod_reply = xcb_dri3_get_supported_modifiers_reply(draw->conn,
1149 mod_cookie,
1150 &error);
1151 if (!mod_reply)
1152 goto no_image;
1153
1154 if (mod_reply->num_window_modifiers) {
1155 count = mod_reply->num_window_modifiers;
1156 modifiers = malloc(count * sizeof(uint64_t));
1157 if (!modifiers) {
1158 free(mod_reply);
1159 goto no_image;
1160 }
1161
1162 memcpy(modifiers,
1163 xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply),
1164 count * sizeof(uint64_t));
1165
1166 if (!has_supported_modifier(draw, image_format_to_fourcc(format),
1167 modifiers, count)) {
1168 free(modifiers);
1169 count = 0;
1170 modifiers = NULL;
1171 }
1172 }
1173
1174 if (mod_reply->num_screen_modifiers && modifiers == NULL) {
1175 count = mod_reply->num_screen_modifiers;
1176 modifiers = malloc(count * sizeof(uint64_t));
1177 if (!modifiers) {
1178 free(modifiers);
1179 free(mod_reply);
1180 goto no_image;
1181 }
1182
1183 memcpy(modifiers,
1184 xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply),
1185 count * sizeof(uint64_t));
1186 }
1187
1188 free(mod_reply);
1189
1190 buffer->image = draw->ext->image->createImageWithModifiers(draw->dri_screen,
1191 width, height,
1192 format,
1193 modifiers,
1194 count,
1195 buffer);
1196 free(modifiers);
1197 }
1198
1199 if (!buffer->image)
1200 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1201 width, height,
1202 format,
1203 __DRI_IMAGE_USE_SHARE |
1204 __DRI_IMAGE_USE_SCANOUT |
1205 __DRI_IMAGE_USE_BACKBUFFER,
1206 buffer);
1207
1208 pixmap_buffer = buffer->image;
1209
1210 if (!buffer->image)
1211 goto no_image;
1212 } else {
1213 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1214 width, height,
1215 format,
1216 0,
1217 buffer);
1218
1219 if (!buffer->image)
1220 goto no_image;
1221
1222 buffer->linear_buffer =
1223 draw->ext->image->createImage(draw->dri_screen,
1224 width, height, format,
1225 __DRI_IMAGE_USE_SHARE |
1226 __DRI_IMAGE_USE_LINEAR |
1227 __DRI_IMAGE_USE_BACKBUFFER,
1228 buffer);
1229 pixmap_buffer = buffer->linear_buffer;
1230
1231 if (!buffer->linear_buffer)
1232 goto no_linear_buffer;
1233 }
1234
1235 /* X wants some information about the planes, so ask the image for it
1236 */
1237 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_NUM_PLANES,
1238 &num_planes))
1239 num_planes = 1;
1240
1241 for (i = 0; i < num_planes; i++) {
1242 __DRIimage *image = draw->ext->image->fromPlanar(pixmap_buffer, i, NULL);
1243
1244 if (!image) {
1245 assert(i == 0);
1246 image = pixmap_buffer;
1247 }
1248
1249 ret = draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_FD,
1250 &buffer_fds[i]);
1251 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_STRIDE,
1252 &buffer->strides[i]);
1253 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_OFFSET,
1254 &buffer->offsets[i]);
1255 if (image != pixmap_buffer)
1256 draw->ext->image->destroyImage(image);
1257
1258 if (!ret)
1259 goto no_buffer_attrib;
1260 }
1261
1262 ret = draw->ext->image->queryImage(pixmap_buffer,
1263 __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod);
1264 buffer->modifier = (uint64_t) mod << 32;
1265 ret &= draw->ext->image->queryImage(pixmap_buffer,
1266 __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod);
1267 buffer->modifier |= (uint64_t)(mod & 0xffffffff);
1268
1269 if (!ret)
1270 buffer->modifier = DRM_FORMAT_MOD_INVALID;
1271
1272 pixmap = xcb_generate_id(draw->conn);
1273 if (draw->multiplanes_available &&
1274 buffer->modifier != DRM_FORMAT_MOD_INVALID) {
1275 xcb_dri3_pixmap_from_buffers(draw->conn,
1276 pixmap,
1277 draw->drawable,
1278 num_planes,
1279 width, height,
1280 buffer->strides[0], buffer->offsets[0],
1281 buffer->strides[1], buffer->offsets[1],
1282 buffer->strides[2], buffer->offsets[2],
1283 buffer->strides[3], buffer->offsets[3],
1284 depth, buffer->cpp * 8,
1285 buffer->modifier,
1286 buffer_fds);
1287 } else {
1288 xcb_dri3_pixmap_from_buffer(draw->conn,
1289 pixmap,
1290 draw->drawable,
1291 buffer->size,
1292 width, height, buffer->strides[0],
1293 depth, buffer->cpp * 8,
1294 buffer_fds[0]);
1295 }
1296
1297 xcb_dri3_fence_from_fd(draw->conn,
1298 pixmap,
1299 (sync_fence = xcb_generate_id(draw->conn)),
1300 false,
1301 fence_fd);
1302
1303 buffer->pixmap = pixmap;
1304 buffer->own_pixmap = true;
1305 buffer->sync_fence = sync_fence;
1306 buffer->shm_fence = shm_fence;
1307 buffer->width = width;
1308 buffer->height = height;
1309
1310 /* Mark the buffer as idle
1311 */
1312 dri3_fence_set(buffer);
1313
1314 return buffer;
1315
1316 no_buffer_attrib:
1317 do {
1318 close(buffer_fds[i]);
1319 } while (--i >= 0);
1320 draw->ext->image->destroyImage(pixmap_buffer);
1321 no_linear_buffer:
1322 if (draw->is_different_gpu)
1323 draw->ext->image->destroyImage(buffer->image);
1324 no_image:
1325 free(buffer);
1326 no_buffer:
1327 xshmfence_unmap_shm(shm_fence);
1328 no_shm_fence:
1329 close(fence_fd);
1330 return NULL;
1331 }
1332
1333 /** dri3_update_drawable
1334 *
1335 * Called the first time we use the drawable and then
1336 * after we receive present configure notify events to
1337 * track the geometry of the drawable
1338 */
1339 static int
1340 dri3_update_drawable(__DRIdrawable *driDrawable,
1341 struct loader_dri3_drawable *draw)
1342 {
1343 mtx_lock(&draw->mtx);
1344 if (draw->first_init) {
1345 xcb_get_geometry_cookie_t geom_cookie;
1346 xcb_get_geometry_reply_t *geom_reply;
1347 xcb_void_cookie_t cookie;
1348 xcb_generic_error_t *error;
1349 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1350 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1351
1352 draw->first_init = false;
1353
1354 /* Try to select for input on the window.
1355 *
1356 * If the drawable is a window, this will get our events
1357 * delivered.
1358 *
1359 * Otherwise, we'll get a BadWindow error back from this request which
1360 * will let us know that the drawable is a pixmap instead.
1361 */
1362
1363 draw->eid = xcb_generate_id(draw->conn);
1364 cookie =
1365 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1366 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1367 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1368 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1369
1370 present_capabilities_cookie =
1371 xcb_present_query_capabilities(draw->conn, draw->drawable);
1372
1373 /* Create an XCB event queue to hold present events outside of the usual
1374 * application event queue
1375 */
1376 draw->special_event = xcb_register_for_special_xge(draw->conn,
1377 &xcb_present_id,
1378 draw->eid,
1379 draw->stamp);
1380 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1381
1382 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1383
1384 if (!geom_reply) {
1385 mtx_unlock(&draw->mtx);
1386 return false;
1387 }
1388
1389 draw->width = geom_reply->width;
1390 draw->height = geom_reply->height;
1391 draw->depth = geom_reply->depth;
1392 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1393
1394 free(geom_reply);
1395
1396 draw->is_pixmap = false;
1397
1398 /* Check to see if our select input call failed. If it failed with a
1399 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1400 * special event queue created above and mark the drawable as a pixmap
1401 */
1402
1403 error = xcb_request_check(draw->conn, cookie);
1404
1405 present_capabilities_reply =
1406 xcb_present_query_capabilities_reply(draw->conn,
1407 present_capabilities_cookie,
1408 NULL);
1409
1410 if (present_capabilities_reply) {
1411 draw->present_capabilities = present_capabilities_reply->capabilities;
1412 free(present_capabilities_reply);
1413 } else
1414 draw->present_capabilities = 0;
1415
1416 if (error) {
1417 if (error->error_code != BadWindow) {
1418 free(error);
1419 mtx_unlock(&draw->mtx);
1420 return false;
1421 }
1422 draw->is_pixmap = true;
1423 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1424 draw->special_event = NULL;
1425 }
1426 }
1427 dri3_flush_present_events(draw);
1428 mtx_unlock(&draw->mtx);
1429 return true;
1430 }
1431
1432 __DRIimage *
1433 loader_dri3_create_image(xcb_connection_t *c,
1434 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1435 unsigned int format,
1436 __DRIscreen *dri_screen,
1437 const __DRIimageExtension *image,
1438 void *loaderPrivate)
1439 {
1440 int *fds;
1441 __DRIimage *image_planar, *ret;
1442 int stride, offset;
1443
1444 /* Get an FD for the pixmap object
1445 */
1446 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1447
1448 stride = bp_reply->stride;
1449 offset = 0;
1450
1451 /* createImageFromFds creates a wrapper __DRIimage structure which
1452 * can deal with multiple planes for things like Yuv images. So, once
1453 * we've gotten the planar wrapper, pull the single plane out of it and
1454 * discard the wrapper.
1455 */
1456 image_planar = image->createImageFromFds(dri_screen,
1457 bp_reply->width,
1458 bp_reply->height,
1459 image_format_to_fourcc(format),
1460 fds, 1,
1461 &stride, &offset, loaderPrivate);
1462 close(fds[0]);
1463 if (!image_planar)
1464 return NULL;
1465
1466 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1467
1468 if (!ret)
1469 ret = image_planar;
1470 else
1471 image->destroyImage(image_planar);
1472
1473 return ret;
1474 }
1475
1476 __DRIimage *
1477 loader_dri3_create_image_from_buffers(xcb_connection_t *c,
1478 xcb_dri3_buffers_from_pixmap_reply_t *bp_reply,
1479 unsigned int format,
1480 __DRIscreen *dri_screen,
1481 const __DRIimageExtension *image,
1482 void *loaderPrivate)
1483 {
1484 __DRIimage *ret;
1485 int *fds;
1486 uint32_t *strides_in, *offsets_in;
1487 int strides[4], offsets[4];
1488 unsigned error;
1489 int i;
1490
1491 if (bp_reply->nfd > 4)
1492 return NULL;
1493
1494 fds = xcb_dri3_buffers_from_pixmap_reply_fds(c, bp_reply);
1495 strides_in = xcb_dri3_buffers_from_pixmap_strides(bp_reply);
1496 offsets_in = xcb_dri3_buffers_from_pixmap_offsets(bp_reply);
1497 for (i = 0; i < bp_reply->nfd; i++) {
1498 strides[i] = strides_in[i];
1499 offsets[i] = offsets_in[i];
1500 }
1501
1502 ret = image->createImageFromDmaBufs2(dri_screen,
1503 bp_reply->width,
1504 bp_reply->height,
1505 image_format_to_fourcc(format),
1506 bp_reply->modifier,
1507 fds, bp_reply->nfd,
1508 strides, offsets,
1509 0, 0, 0, 0, /* UNDEFINED */
1510 &error, loaderPrivate);
1511
1512 for (i = 0; i < bp_reply->nfd; i++)
1513 close(fds[i]);
1514
1515 return ret;
1516 }
1517
1518 /** dri3_get_pixmap_buffer
1519 *
1520 * Get the DRM object for a pixmap from the X server and
1521 * wrap that with a __DRIimage structure using createImageFromFds
1522 */
1523 static struct loader_dri3_buffer *
1524 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1525 enum loader_dri3_buffer_type buffer_type,
1526 struct loader_dri3_drawable *draw)
1527 {
1528 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1529 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1530 xcb_drawable_t pixmap;
1531 xcb_sync_fence_t sync_fence;
1532 struct xshmfence *shm_fence;
1533 int width;
1534 int height;
1535 int fence_fd;
1536 __DRIscreen *cur_screen;
1537
1538 if (buffer)
1539 return buffer;
1540
1541 pixmap = draw->drawable;
1542
1543 buffer = calloc(1, sizeof *buffer);
1544 if (!buffer)
1545 goto no_buffer;
1546
1547 fence_fd = xshmfence_alloc_shm();
1548 if (fence_fd < 0)
1549 goto no_fence;
1550 shm_fence = xshmfence_map_shm(fence_fd);
1551 if (shm_fence == NULL) {
1552 close (fence_fd);
1553 goto no_fence;
1554 }
1555
1556 /* Get the currently-bound screen or revert to using the drawable's screen if
1557 * no contexts are currently bound. The latter case is at least necessary for
1558 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1559 */
1560 cur_screen = draw->vtable->get_dri_screen();
1561 if (!cur_screen) {
1562 cur_screen = draw->dri_screen;
1563 }
1564
1565 xcb_dri3_fence_from_fd(draw->conn,
1566 pixmap,
1567 (sync_fence = xcb_generate_id(draw->conn)),
1568 false,
1569 fence_fd);
1570
1571 if (draw->multiplanes_available &&
1572 draw->ext->image->base.version >= 15 &&
1573 draw->ext->image->createImageFromDmaBufs2) {
1574 xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie;
1575 xcb_dri3_buffers_from_pixmap_reply_t *bps_reply;
1576
1577 bps_cookie = xcb_dri3_buffers_from_pixmap(draw->conn, pixmap);
1578 bps_reply = xcb_dri3_buffers_from_pixmap_reply(draw->conn, bps_cookie,
1579 NULL);
1580 if (!bps_reply)
1581 goto no_image;
1582 buffer->image =
1583 loader_dri3_create_image_from_buffers(draw->conn, bps_reply, format,
1584 cur_screen, draw->ext->image,
1585 buffer);
1586 width = bps_reply->width;
1587 height = bps_reply->height;
1588 free(bps_reply);
1589 } else {
1590 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1591 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1592
1593 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1594 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1595 if (!bp_reply)
1596 goto no_image;
1597
1598 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1599 cur_screen, draw->ext->image,
1600 buffer);
1601 width = bp_reply->width;
1602 height = bp_reply->height;
1603 free(bp_reply);
1604 }
1605
1606 if (!buffer->image)
1607 goto no_image;
1608
1609 buffer->pixmap = pixmap;
1610 buffer->own_pixmap = false;
1611 buffer->width = width;
1612 buffer->height = height;
1613 buffer->shm_fence = shm_fence;
1614 buffer->sync_fence = sync_fence;
1615
1616 draw->buffers[buf_id] = buffer;
1617
1618 return buffer;
1619
1620 no_image:
1621 xcb_sync_destroy_fence(draw->conn, sync_fence);
1622 xshmfence_unmap_shm(shm_fence);
1623 no_fence:
1624 free(buffer);
1625 no_buffer:
1626 return NULL;
1627 }
1628
1629 /** dri3_get_buffer
1630 *
1631 * Find a front or back buffer, allocating new ones as necessary
1632 */
1633 static struct loader_dri3_buffer *
1634 dri3_get_buffer(__DRIdrawable *driDrawable,
1635 unsigned int format,
1636 enum loader_dri3_buffer_type buffer_type,
1637 struct loader_dri3_drawable *draw)
1638 {
1639 struct loader_dri3_buffer *buffer;
1640 int buf_id;
1641
1642 if (buffer_type == loader_dri3_buffer_back) {
1643 draw->back_format = format;
1644
1645 buf_id = dri3_find_back(draw);
1646
1647 if (buf_id < 0)
1648 return NULL;
1649 } else {
1650 buf_id = LOADER_DRI3_FRONT_ID;
1651 }
1652
1653 buffer = draw->buffers[buf_id];
1654
1655 /* Allocate a new buffer if there isn't an old one, if that
1656 * old one is the wrong size, or if it's suboptimal
1657 */
1658 if (!buffer || buffer->width != draw->width ||
1659 buffer->height != draw->height ||
1660 buffer->reallocate) {
1661 struct loader_dri3_buffer *new_buffer;
1662
1663 /* Allocate the new buffers
1664 */
1665 new_buffer = dri3_alloc_render_buffer(draw,
1666 format,
1667 draw->width,
1668 draw->height,
1669 draw->depth);
1670 if (!new_buffer)
1671 return NULL;
1672
1673 /* When resizing, copy the contents of the old buffer, waiting for that
1674 * copy to complete using our fences before proceeding
1675 */
1676 if ((buffer_type == loader_dri3_buffer_back ||
1677 (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
1678 && buffer) {
1679
1680 /* Fill the new buffer with data from an old buffer */
1681 dri3_fence_await(draw->conn, draw, buffer);
1682 if (!loader_dri3_blit_image(draw,
1683 new_buffer->image,
1684 buffer->image,
1685 0, 0, draw->width, draw->height,
1686 0, 0, 0) &&
1687 !buffer->linear_buffer) {
1688 dri3_fence_reset(draw->conn, new_buffer);
1689 dri3_copy_area(draw->conn,
1690 buffer->pixmap,
1691 new_buffer->pixmap,
1692 dri3_drawable_gc(draw),
1693 0, 0, 0, 0,
1694 draw->width, draw->height);
1695 dri3_fence_trigger(draw->conn, new_buffer);
1696 }
1697 dri3_free_render_buffer(draw, buffer);
1698 } else if (buffer_type == loader_dri3_buffer_front) {
1699 /* Fill the new fake front with data from a real front */
1700 loader_dri3_swapbuffer_barrier(draw);
1701 dri3_fence_reset(draw->conn, new_buffer);
1702 dri3_copy_area(draw->conn,
1703 draw->drawable,
1704 new_buffer->pixmap,
1705 dri3_drawable_gc(draw),
1706 0, 0, 0, 0,
1707 draw->width, draw->height);
1708 dri3_fence_trigger(draw->conn, new_buffer);
1709
1710 if (new_buffer->linear_buffer) {
1711 dri3_fence_await(draw->conn, draw, new_buffer);
1712 (void) loader_dri3_blit_image(draw,
1713 new_buffer->image,
1714 new_buffer->linear_buffer,
1715 0, 0, draw->width, draw->height,
1716 0, 0, 0);
1717 }
1718 }
1719 buffer = new_buffer;
1720 draw->buffers[buf_id] = buffer;
1721 }
1722 dri3_fence_await(draw->conn, draw, buffer);
1723
1724 /*
1725 * Do we need to preserve the content of a previous buffer?
1726 *
1727 * Note that this blit is needed only to avoid a wait for a buffer that
1728 * is currently in the flip chain or being scanned out from. That's really
1729 * a tradeoff. If we're ok with the wait we can reduce the number of back
1730 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1731 * but in the latter case we must disallow page-flipping.
1732 */
1733 if (buffer_type == loader_dri3_buffer_back &&
1734 draw->cur_blit_source != -1 &&
1735 draw->buffers[draw->cur_blit_source] &&
1736 buffer != draw->buffers[draw->cur_blit_source]) {
1737
1738 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1739
1740 /* Avoid flushing here. This will probably help tiling hardware. */
1741 (void) loader_dri3_blit_image(draw,
1742 buffer->image,
1743 source->image,
1744 0, 0, draw->width, draw->height,
1745 0, 0, 0);
1746 buffer->last_swap = source->last_swap;
1747 draw->cur_blit_source = -1;
1748 }
1749 /* Return the requested buffer */
1750 return buffer;
1751 }
1752
1753 /** dri3_free_buffers
1754 *
1755 * Free the front buffer or all of the back buffers. Used
1756 * when the application changes which buffers it needs
1757 */
1758 static void
1759 dri3_free_buffers(__DRIdrawable *driDrawable,
1760 enum loader_dri3_buffer_type buffer_type,
1761 struct loader_dri3_drawable *draw)
1762 {
1763 struct loader_dri3_buffer *buffer;
1764 int first_id;
1765 int n_id;
1766 int buf_id;
1767
1768 switch (buffer_type) {
1769 case loader_dri3_buffer_back:
1770 first_id = LOADER_DRI3_BACK_ID(0);
1771 n_id = LOADER_DRI3_MAX_BACK;
1772 draw->cur_blit_source = -1;
1773 break;
1774 case loader_dri3_buffer_front:
1775 first_id = LOADER_DRI3_FRONT_ID;
1776 /* Don't free a fake front holding new backbuffer content. */
1777 n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
1778 }
1779
1780 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1781 buffer = draw->buffers[buf_id];
1782 if (buffer) {
1783 dri3_free_render_buffer(draw, buffer);
1784 draw->buffers[buf_id] = NULL;
1785 }
1786 }
1787 }
1788
1789 /** loader_dri3_get_buffers
1790 *
1791 * The published buffer allocation API.
1792 * Returns all of the necessary buffers, allocating
1793 * as needed.
1794 */
1795 int
1796 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1797 unsigned int format,
1798 uint32_t *stamp,
1799 void *loaderPrivate,
1800 uint32_t buffer_mask,
1801 struct __DRIimageList *buffers)
1802 {
1803 struct loader_dri3_drawable *draw = loaderPrivate;
1804 struct loader_dri3_buffer *front, *back;
1805
1806 buffers->image_mask = 0;
1807 buffers->front = NULL;
1808 buffers->back = NULL;
1809
1810 front = NULL;
1811 back = NULL;
1812
1813 if (!dri3_update_drawable(driDrawable, draw))
1814 return false;
1815
1816 /* pixmaps always have front buffers.
1817 * Exchange swaps also mandate fake front buffers.
1818 */
1819 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1820 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1821
1822 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1823 /* All pixmaps are owned by the server gpu.
1824 * When we use a different gpu, we can't use the pixmap
1825 * as a buffer since it is potentially tiled in a way
1826 * our device can't understand. In this case, use
1827 * a fake front buffer. Hopefully the pixmap
1828 * content will get synced with the fake front
1829 * buffer.
1830 */
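/* Summary of the choice below: pixmap on the same GPU -> wrap the real
 * pixmap; window, or pixmap on a different GPU -> allocate a fake front.
 */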
1831 if (draw->is_pixmap && !draw->is_different_gpu)
1832 front = dri3_get_pixmap_buffer(driDrawable,
1833 format,
1834 loader_dri3_buffer_front,
1835 draw);
1836 else
1837 front = dri3_get_buffer(driDrawable,
1838 format,
1839 loader_dri3_buffer_front,
1840 draw);
1841
1842 if (!front)
1843 return false;
1844 } else {
1845 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1846 draw->have_fake_front = 0;
1847 }
1848
1849 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1850 back = dri3_get_buffer(driDrawable,
1851 format,
1852 loader_dri3_buffer_back,
1853 draw);
1854 if (!back)
1855 return false;
1856 draw->have_back = 1;
1857 } else {
1858 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1859 draw->have_back = 0;
1860 }
1861
1862 if (front) {
1863 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1864 buffers->front = front->image;
1865 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1866 }
1867
1868 if (back) {
1869 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1870 buffers->back = back->image;
1871 }
1872
1873 draw->stamp = stamp;
1874
1875 return true;
1876 }
1877
1878 /** loader_dri3_update_drawable_geometry
1879 *
1880 * Get the current drawable geometry.
1881 */
1882 void
1883 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1884 {
1885 xcb_get_geometry_cookie_t geom_cookie;
1886 xcb_get_geometry_reply_t *geom_reply;
1887
1888 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1889
1890 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1891
1892 if (geom_reply) {
1893 draw->width = geom_reply->width;
1894 draw->height = geom_reply->height;
1895 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1896 draw->ext->flush->invalidate(draw->dri_drawable);
1897
1898 free(geom_reply);
1899 }
1900 }
1901
1902
1903 /**
1904 * Make sure the server has flushed all pending swap buffers to hardware
1905 * for this drawable. Ideally we'd want to send an X protocol request to
1906 * have the server block our connection until the swaps are complete. That
1907 * would avoid the potential round-trip here.
1908 */
1909 void
1910 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1911 {
1912 int64_t ust, msc, sbc;
1913
1914 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1915 }
1916
1917 /**
1918 * Perform any cleanup associated with a close screen operation.
1919 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1920 *
1921 * This function destroys the screen's cached swap context if any.
1922 */
1923 void
1924 loader_dri3_close_screen(__DRIscreen *dri_screen)
1925 {
1926 mtx_lock(&blit_context.mtx);
1927 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1928 blit_context.core->destroyContext(blit_context.ctx);
1929 blit_context.ctx = NULL;
1930 }
1931 mtx_unlock(&blit_context.mtx);
1932 }
1933
1934 /**
1935 * Find a backbuffer slot - potentially allocating a back buffer
1936 *
1937 * \param draw[in,out] Pointer to the drawable for which to find back.
1938 * \return Pointer to a new back buffer or NULL if allocation failed or was
1939 * not mandated.
1940 *
1941 * Find a back buffer slot; if the buffer there hasn't been allocated yet
1942 * and needs initializing, try to allocate and initialize it.
1943 */
1945 static struct loader_dri3_buffer *
1946 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1947 {
1948 struct loader_dri3_buffer *back;
1949 int id;
1950
1951 id = dri3_find_back(draw);
1952 if (id < 0)
1953 return NULL;
1954
1955 back = draw->buffers[id];
1956 /* Allocate a new back if we haven't got one */
1957 if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
1958 dri3_update_drawable(draw->dri_drawable, draw))
1959 back = dri3_alloc_render_buffer(draw, draw->back_format,
1960 draw->width, draw->height, draw->depth);
1961
1962 if (!back)
1963 return NULL;
1964
1965 draw->buffers[id] = back;
1966
1967 /* If necessary, prefill the back with data according to swap_method mode. */
1968 if (draw->cur_blit_source != -1 &&
1969 draw->buffers[draw->cur_blit_source] &&
1970 back != draw->buffers[draw->cur_blit_source]) {
1971 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1972
1973 dri3_fence_await(draw->conn, draw, source);
1974 dri3_fence_await(draw->conn, draw, back);
1975 (void) loader_dri3_blit_image(draw,
1976 back->image,
1977 source->image,
1978 0, 0, draw->width, draw->height,
1979 0, 0, 0);
1980 back->last_swap = source->last_swap;
1981 draw->cur_blit_source = -1;
1982 }
1983
1984 return back;
1985 }