dri3: Only update number of back buffers in loader_dri3_get_buffers
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27 #include <string.h>
28
29 #include <X11/xshmfence.h>
30 #include <xcb/xcb.h>
31 #include <xcb/dri3.h>
32 #include <xcb/present.h>
33
34 #include <X11/Xlib-xcb.h>
35
36 #include "loader_dri3_helper.h"
37 #include "util/macros.h"
38 #include "drm_fourcc.h"
39
40 /* From xmlpool/options.h, user exposed so should be stable */
41 #define DRI_CONF_VBLANK_NEVER 0
42 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
43 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
44 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
45
46 /**
47 * A cached blit context.
48 */
49 struct loader_dri3_blit_context {
50 mtx_t mtx;
51 __DRIcontext *ctx;
52 __DRIscreen *cur_screen;
53 const __DRIcoreExtension *core;
54 };
55
56 /* For simplicity we maintain the cache only for a single screen at a time */
57 static struct loader_dri3_blit_context blit_context = {
58 _MTX_INITIALIZER_NP, NULL
59 };
60
61 static void
62 dri3_flush_present_events(struct loader_dri3_drawable *draw);
63
64 static struct loader_dri3_buffer *
65 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
66
67 /**
68 * Do we have blit functionality in the image blit extension?
69 *
70 * \param draw[in] The drawable intended to blit from / to.
71 * \return true if we have blit functionality. false otherwise.
72 */
73 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
74 {
75 return draw->ext->image->base.version >= 9 &&
76 draw->ext->image->blitImage != NULL;
77 }
78
79 /**
80 * Get and lock (for use with the current thread) a dri context associated
81 * with the drawable's dri screen. The context is intended to be used with
82 * the dri image extension's blitImage method.
83 *
84 * \param draw[in] Pointer to the drawable whose dri screen we want a
85 * dri context for.
86 * \return A dri context or NULL if context creation failed.
87 *
88 * When the caller is done with the context (even if the context returned was
89 * NULL), the caller must call loader_dri3_blit_context_put.
90 */
91 static __DRIcontext *
92 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
93 {
94 mtx_lock(&blit_context.mtx);
95
96 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
97 blit_context.core->destroyContext(blit_context.ctx);
98 blit_context.ctx = NULL;
99 }
100
101 if (!blit_context.ctx) {
102 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
103 NULL, NULL, NULL);
104 blit_context.cur_screen = draw->dri_screen;
105 blit_context.core = draw->ext->core;
106 }
107
108 return blit_context.ctx;
109 }
110
111 /**
112 * Release (for use with other threads) a dri context previously obtained using
113 * loader_dri3_blit_context_get.
114 */
115 static void
116 loader_dri3_blit_context_put(void)
117 {
118 mtx_unlock(&blit_context.mtx);
119 }
120
121 /**
122  * Blit (parts of) the contents of a DRI image to another DRI image.
123 *
124 * \param draw[in] The drawable which owns the images.
125 * \param dst[in] The destination image.
126 * \param src[in] The source image.
127 * \param dstx0[in] Start destination coordinate.
128 * \param dsty0[in] Start destination coordinate.
129 * \param width[in] Blit width.
130 * \param height[in] Blit height.
131 * \param srcx0[in] Start source coordinate.
132 * \param srcy0[in] Start source coordinate.
133 * \param flush_flag[in] Image blit flush flag.
134 * \return true iff successful.
135 */
136 static bool
137 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
138 __DRIimage *dst, __DRIimage *src,
139 int dstx0, int dsty0, int width, int height,
140 int srcx0, int srcy0, int flush_flag)
141 {
142 __DRIcontext *dri_context;
143 bool use_blit_context = false;
144
145 if (!loader_dri3_have_image_blit(draw))
146 return false;
147
148 dri_context = draw->vtable->get_dri_context(draw);
149
150 if (!dri_context || !draw->vtable->in_current_context(draw)) {
151 dri_context = loader_dri3_blit_context_get(draw);
152 use_blit_context = true;
153 flush_flag |= __BLIT_FLAG_FLUSH;
154 }
155
156 if (dri_context)
157 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
158 width, height, srcx0, srcy0,
159 width, height, flush_flag);
160
161 if (use_blit_context)
162 loader_dri3_blit_context_put();
163
164 return dri_context != NULL;
165 }
166
167 static inline void
168 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
169 {
170 xshmfence_reset(buffer->shm_fence);
171 }
172
173 static inline void
174 dri3_fence_set(struct loader_dri3_buffer *buffer)
175 {
176 xshmfence_trigger(buffer->shm_fence);
177 }
178
179 static inline void
180 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
181 {
182 xcb_sync_trigger_fence(c, buffer->sync_fence);
183 }
184
185 static inline void
186 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
187 struct loader_dri3_buffer *buffer)
188 {
189 xcb_flush(c);
190 xshmfence_await(buffer->shm_fence);
191 if (draw) {
192 mtx_lock(&draw->mtx);
193 dri3_flush_present_events(draw);
194 mtx_unlock(&draw->mtx);
195 }
196 }
197
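/* Illustrative rationale (assumed, not from the original comments): when the
 * last present completed as a flip, one buffer can be held by the display at
 * any given time, so a third back buffer is kept to avoid stalling rendering;
 * plain copies only need two.
 */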
198 static void
199 dri3_update_num_back(struct loader_dri3_drawable *draw)
200 {
201 if (draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP)
202 draw->num_back = 3;
203 else
204 draw->num_back = 2;
205 }
206
207 void
208 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
209 {
210 draw->swap_interval = interval;
211 }
212
213 /** dri3_free_render_buffer
214 *
215  * Free everything associated with one render buffer, including its pixmap,
216  * fence state and the driver image.
217 */
218 static void
219 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
220 struct loader_dri3_buffer *buffer)
221 {
222 if (buffer->own_pixmap)
223 xcb_free_pixmap(draw->conn, buffer->pixmap);
224 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
225 xshmfence_unmap_shm(buffer->shm_fence);
226 draw->ext->image->destroyImage(buffer->image);
227 if (buffer->linear_buffer)
228 draw->ext->image->destroyImage(buffer->linear_buffer);
229 free(buffer);
230 }
231
232 void
233 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
234 {
235 int i;
236
237 draw->ext->core->destroyDrawable(draw->dri_drawable);
238
239 for (i = 0; i < ARRAY_SIZE(draw->buffers); i++) {
240 if (draw->buffers[i])
241 dri3_free_render_buffer(draw, draw->buffers[i]);
242 }
243
244 if (draw->special_event) {
245 xcb_void_cookie_t cookie =
246 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
247 XCB_PRESENT_EVENT_MASK_NO_EVENT);
248
249 xcb_discard_reply(draw->conn, cookie.sequence);
250 xcb_unregister_for_special_event(draw->conn, draw->special_event);
251 }
252
253 cnd_destroy(&draw->event_cnd);
254 mtx_destroy(&draw->mtx);
255 }
256
257 int
258 loader_dri3_drawable_init(xcb_connection_t *conn,
259 xcb_drawable_t drawable,
260 __DRIscreen *dri_screen,
261 bool is_different_gpu,
262 bool multiplanes_available,
263 const __DRIconfig *dri_config,
264 struct loader_dri3_extensions *ext,
265 const struct loader_dri3_vtable *vtable,
266 struct loader_dri3_drawable *draw)
267 {
268 xcb_get_geometry_cookie_t cookie;
269 xcb_get_geometry_reply_t *reply;
270 xcb_generic_error_t *error;
271 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
272 int swap_interval;
273
274 draw->conn = conn;
275 draw->ext = ext;
276 draw->vtable = vtable;
277 draw->drawable = drawable;
278 draw->dri_screen = dri_screen;
279 draw->is_different_gpu = is_different_gpu;
280 draw->multiplanes_available = multiplanes_available;
281
282 draw->have_back = 0;
283 draw->have_fake_front = 0;
284 draw->first_init = true;
285
286 draw->cur_blit_source = -1;
287 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
288 mtx_init(&draw->mtx, mtx_plain);
289 cnd_init(&draw->event_cnd);
290
291 if (draw->ext->config)
292 draw->ext->config->configQueryi(draw->dri_screen,
293 "vblank_mode", &vblank_mode);
294
295 switch (vblank_mode) {
296 case DRI_CONF_VBLANK_NEVER:
297 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
298 swap_interval = 0;
299 break;
300 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
301 case DRI_CONF_VBLANK_ALWAYS_SYNC:
302 default:
303 swap_interval = 1;
304 break;
305 }
306 draw->swap_interval = swap_interval;
307
308 dri3_update_num_back(draw);
309
310 /* Create a new drawable */
311 draw->dri_drawable =
312 draw->ext->image_driver->createNewDrawable(dri_screen,
313 dri_config,
314 draw);
315
316 if (!draw->dri_drawable)
317 return 1;
318
319 cookie = xcb_get_geometry(draw->conn, draw->drawable);
320 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
321 if (reply == NULL || error != NULL) {
322 draw->ext->core->destroyDrawable(draw->dri_drawable);
323 return 1;
324 }
325
326 draw->width = reply->width;
327 draw->height = reply->height;
328 draw->depth = reply->depth;
329 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
330 free(reply);
331
332 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
333 if (draw->ext->core->base.version >= 2) {
334 (void )draw->ext->core->getConfigAttrib(dri_config,
335 __DRI_ATTRIB_SWAP_METHOD,
336 &draw->swap_method);
337 }
338
339 /*
340    * Make sure the server has the same swap interval we do for the new
341 * drawable.
342 */
343 loader_dri3_set_swap_interval(draw, swap_interval);
344
345 return 0;
346 }
347
348 /*
349 * Process one Present event
350 */
351 static void
352 dri3_handle_present_event(struct loader_dri3_drawable *draw,
353 xcb_present_generic_event_t *ge)
354 {
355 switch (ge->evtype) {
356 case XCB_PRESENT_CONFIGURE_NOTIFY: {
357 xcb_present_configure_notify_event_t *ce = (void *) ge;
358
359 draw->width = ce->width;
360 draw->height = ce->height;
361 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
362 draw->ext->flush->invalidate(draw->dri_drawable);
363 break;
364 }
365 case XCB_PRESENT_COMPLETE_NOTIFY: {
366 xcb_present_complete_notify_event_t *ce = (void *) ge;
367
368 /* Compute the processed SBC number from the received 32-bit serial number
369        * merged with the upper 32 bits of the sent 64-bit serial number while
370 * checking for wrap.
371 */
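      /* Illustrative example of the wrap handling below: with send_sbc ==
       * 0x100000002 and a completion for serial 0xfffffffe (submitted before
       * the low 32 bits wrapped), the merge yields 0x1fffffffe, which is
       * greater than send_sbc, so 2^32 is subtracted to recover the correct
       * SBC of 0xfffffffe.
       */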
372 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
373 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
374 if (draw->recv_sbc > draw->send_sbc)
375 draw->recv_sbc -= 0x100000000;
376
377 /* When moving from flip to copy, we assume that we can allocate in
378 * a more optimal way if we don't need to cater for the display
379 * controller.
380 */
381 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_COPY &&
382 draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP) {
383 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
384 if (draw->buffers[b])
385 draw->buffers[b]->reallocate = true;
386 }
387 }
388
389 /* If the server tells us that our allocation is suboptimal, we
390 * reallocate once.
391 */
392 #ifdef HAVE_DRI3_MODIFIERS
393 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_SUBOPTIMAL_COPY &&
394 draw->last_present_mode != ce->mode) {
395 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
396 if (draw->buffers[b])
397 draw->buffers[b]->reallocate = true;
398 }
399 }
400 #endif
401 draw->last_present_mode = ce->mode;
402
403 if (draw->vtable->show_fps)
404 draw->vtable->show_fps(draw, ce->ust);
405
406 draw->ust = ce->ust;
407 draw->msc = ce->msc;
408 } else if (ce->serial == draw->eid) {
409 draw->notify_ust = ce->ust;
410 draw->notify_msc = ce->msc;
411 }
412 break;
413 }
414 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
415 xcb_present_idle_notify_event_t *ie = (void *) ge;
416 int b;
417
418 for (b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
419 struct loader_dri3_buffer *buf = draw->buffers[b];
420
421 if (buf && buf->pixmap == ie->pixmap)
422 buf->busy = 0;
423 }
424 break;
425 }
426 }
427 free(ge);
428 }
429
430 static bool
431 dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
432 {
433 xcb_generic_event_t *ev;
434 xcb_present_generic_event_t *ge;
435
436 xcb_flush(draw->conn);
437
438 /* Only have one thread waiting for events at a time */
439 if (draw->has_event_waiter) {
440 cnd_wait(&draw->event_cnd, &draw->mtx);
441 /* Another thread has updated the protected info, so retest. */
442 return true;
443 } else {
444 draw->has_event_waiter = true;
445 /* Allow other threads access to the drawable while we're waiting. */
446 mtx_unlock(&draw->mtx);
447 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
448 mtx_lock(&draw->mtx);
449 draw->has_event_waiter = false;
450 cnd_broadcast(&draw->event_cnd);
451 }
452 if (!ev)
453 return false;
454 ge = (void *) ev;
455 dri3_handle_present_event(draw, ge);
456 return true;
457 }
458
459 /** loader_dri3_wait_for_msc
460 *
461 * Get the X server to send an event when the target msc/divisor/remainder is
462 * reached.
463 */
464 bool
465 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
466 int64_t target_msc,
467 int64_t divisor, int64_t remainder,
468 int64_t *ust, int64_t *msc, int64_t *sbc)
469 {
470 xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
471 draw->drawable,
472 draw->eid,
473 target_msc,
474 divisor,
475 remainder);
476 xcb_generic_event_t *ev;
477 unsigned full_sequence;
478
479 mtx_lock(&draw->mtx);
480 xcb_flush(draw->conn);
481
482 /* Wait for the event */
483 do {
484 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
485 if (!ev) {
486 mtx_unlock(&draw->mtx);
487 return false;
488 }
489
490 full_sequence = ev->full_sequence;
491 dri3_handle_present_event(draw, (void *) ev);
492 } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
493
494 *ust = draw->notify_ust;
495 *msc = draw->notify_msc;
496 *sbc = draw->recv_sbc;
497 mtx_unlock(&draw->mtx);
498
499 return true;
500 }
501
502 /** loader_dri3_wait_for_sbc
503 *
504 * Wait for the completed swap buffer count to reach the specified
505 * target. Presumably the application knows that this will be reached with
506 * outstanding complete events, or we're going to be here awhile.
507 */
508 int
509 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
510 int64_t target_sbc, int64_t *ust,
511 int64_t *msc, int64_t *sbc)
512 {
513 /* From the GLX_OML_sync_control spec:
514 *
515 * "If <target_sbc> = 0, the function will block until all previous
516 * swaps requested with glXSwapBuffersMscOML for that window have
517 * completed."
518 */
519 mtx_lock(&draw->mtx);
520 if (!target_sbc)
521 target_sbc = draw->send_sbc;
522
523 while (draw->recv_sbc < target_sbc) {
524 if (!dri3_wait_for_event_locked(draw)) {
525 mtx_unlock(&draw->mtx);
526 return 0;
527 }
528 }
529
530 *ust = draw->ust;
531 *msc = draw->msc;
532 *sbc = draw->recv_sbc;
533 mtx_unlock(&draw->mtx);
534 return 1;
535 }
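
/* Caller sketch (illustrative, not part of this file): glXWaitForSbcOML()-style
 * code maps almost directly onto this helper:
 *
 *    int64_t ust, msc, sbc;
 *    if (!loader_dri3_wait_for_sbc(draw, target_sbc, &ust, &msc, &sbc))
 *       return False;    // hypothetical error path in the caller
 */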
536
537 /** dri3_find_back
538 *
539 * Find an idle back buffer. If there isn't one, then
540 * wait for a present idle notify event from the X server
541 */
542 static int
543 dri3_find_back(struct loader_dri3_drawable *draw)
544 {
545 int b;
546 int num_to_consider;
547
548 mtx_lock(&draw->mtx);
549    /* Increase the likelihood of reusing the current buffer */
550 dri3_flush_present_events(draw);
551
552    /* Check whether we need to reuse the current back buffer as the new back.
553 * In that case, wait until it's not busy anymore.
554 */
555 num_to_consider = draw->num_back;
556 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
557 num_to_consider = 1;
558 draw->cur_blit_source = -1;
559 }
560
561 for (;;) {
562 for (b = 0; b < num_to_consider; b++) {
563 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
564 struct loader_dri3_buffer *buffer = draw->buffers[id];
565
566 if (!buffer || !buffer->busy) {
567 draw->cur_back = id;
568 mtx_unlock(&draw->mtx);
569 return id;
570 }
571 }
572 if (!dri3_wait_for_event_locked(draw)) {
573 mtx_unlock(&draw->mtx);
574 return -1;
575 }
576 }
577 }
578
579 static xcb_gcontext_t
580 dri3_drawable_gc(struct loader_dri3_drawable *draw)
581 {
582 if (!draw->gc) {
583 uint32_t v = 0;
584 xcb_create_gc(draw->conn,
585 (draw->gc = xcb_generate_id(draw->conn)),
586 draw->drawable,
587 XCB_GC_GRAPHICS_EXPOSURES,
588 &v);
589 }
590 return draw->gc;
591 }
592
593
594 static struct loader_dri3_buffer *
595 dri3_back_buffer(struct loader_dri3_drawable *draw)
596 {
597 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
598 }
599
600 static struct loader_dri3_buffer *
601 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
602 {
603 return draw->buffers[LOADER_DRI3_FRONT_ID];
604 }
605
606 static void
607 dri3_copy_area(xcb_connection_t *c,
608 xcb_drawable_t src_drawable,
609 xcb_drawable_t dst_drawable,
610 xcb_gcontext_t gc,
611 int16_t src_x,
612 int16_t src_y,
613 int16_t dst_x,
614 int16_t dst_y,
615 uint16_t width,
616 uint16_t height)
617 {
618 xcb_void_cookie_t cookie;
619
620 cookie = xcb_copy_area_checked(c,
621 src_drawable,
622 dst_drawable,
623 gc,
624 src_x,
625 src_y,
626 dst_x,
627 dst_y,
628 width,
629 height);
630 xcb_discard_reply(c, cookie.sequence);
631 }
632
633 /**
634 * Asks the driver to flush any queued work necessary for serializing with the
635 * X command stream, and optionally the slightly more strict requirement of
636 * glFlush() equivalence (which would require flushing even if nothing had
637 * been drawn to a window system framebuffer, for example).
638 */
639 void
640 loader_dri3_flush(struct loader_dri3_drawable *draw,
641 unsigned flags,
642 enum __DRI2throttleReason throttle_reason)
643 {
644    /* Note: the returned context may be NULL if no context is current, hence the check below. */
645 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
646
647 if (dri_context) {
648 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
649 flags, throttle_reason);
650 }
651 }
652
653 void
654 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
655 int x, int y,
656 int width, int height,
657 bool flush)
658 {
659 struct loader_dri3_buffer *back;
660 unsigned flags = __DRI2_FLUSH_DRAWABLE;
661
662 /* Check we have the right attachments */
663 if (!draw->have_back || draw->is_pixmap)
664 return;
665
666 if (flush)
667 flags |= __DRI2_FLUSH_CONTEXT;
668 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
669
670 back = dri3_find_back_alloc(draw);
671 if (!back)
672 return;
673
674 y = draw->height - y - height;
675
676 if (draw->is_different_gpu) {
677 /* Update the linear buffer part of the back buffer
678 * for the dri3_copy_area operation
679 */
680 (void) loader_dri3_blit_image(draw,
681 back->linear_buffer,
682 back->image,
683 0, 0, back->width, back->height,
684 0, 0, __BLIT_FLAG_FLUSH);
685 }
686
687 loader_dri3_swapbuffer_barrier(draw);
688 dri3_fence_reset(draw->conn, back);
689 dri3_copy_area(draw->conn,
690 back->pixmap,
691 draw->drawable,
692 dri3_drawable_gc(draw),
693 x, y, x, y, width, height);
694 dri3_fence_trigger(draw->conn, back);
695 /* Refresh the fake front (if present) after we just damaged the real
696 * front.
697 */
698 if (draw->have_fake_front &&
699 !loader_dri3_blit_image(draw,
700 dri3_fake_front_buffer(draw)->image,
701 back->image,
702 x, y, width, height,
703 x, y, __BLIT_FLAG_FLUSH) &&
704 !draw->is_different_gpu) {
705 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
706 dri3_copy_area(draw->conn,
707 back->pixmap,
708 dri3_fake_front_buffer(draw)->pixmap,
709 dri3_drawable_gc(draw),
710 x, y, x, y, width, height);
711 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
712 dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
713 }
714 dri3_fence_await(draw->conn, draw, back);
715 }
716
717 void
718 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
719 xcb_drawable_t dest,
720 xcb_drawable_t src)
721 {
722 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
723
724 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
725 dri3_copy_area(draw->conn,
726 src, dest,
727 dri3_drawable_gc(draw),
728 0, 0, 0, 0, draw->width, draw->height);
729 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
730 dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
731 }
732
733 void
734 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
735 {
736 struct loader_dri3_buffer *front;
737
738 if (draw == NULL || !draw->have_fake_front)
739 return;
740
741 front = dri3_fake_front_buffer(draw);
742
743 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
744
745    /* In the draw->is_different_gpu case, the linear buffer has been updated,
746 * but not yet the tiled buffer.
747 * Copy back to the tiled buffer we use for rendering.
748 * Note that we don't need flushing.
749 */
750 if (draw->is_different_gpu)
751 (void) loader_dri3_blit_image(draw,
752 front->image,
753 front->linear_buffer,
754 0, 0, front->width, front->height,
755 0, 0, 0);
756 }
757
758 void
759 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
760 {
761 struct loader_dri3_buffer *front;
762
763 if (draw == NULL || !draw->have_fake_front)
764 return;
765
766 front = dri3_fake_front_buffer(draw);
767
768    /* In the draw->is_different_gpu case, we update the linear_buffer
769 * before updating the real front.
770 */
771 if (draw->is_different_gpu)
772 (void) loader_dri3_blit_image(draw,
773 front->linear_buffer,
774 front->image,
775 0, 0, front->width, front->height,
776 0, 0, __BLIT_FLAG_FLUSH);
777 loader_dri3_swapbuffer_barrier(draw);
778 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
779 }
780
781 /** dri3_flush_present_events
782 *
783 * Process any present events that have been received from the X server
784 */
785 static void
786 dri3_flush_present_events(struct loader_dri3_drawable *draw)
787 {
788 /* Check to see if any configuration changes have occurred
789 * since we were last invoked
790 */
791 if (draw->has_event_waiter)
792 return;
793
794 if (draw->special_event) {
795 xcb_generic_event_t *ev;
796
797 while ((ev = xcb_poll_for_special_event(draw->conn,
798 draw->special_event)) != NULL) {
799 xcb_present_generic_event_t *ge = (void *) ev;
800 dri3_handle_present_event(draw, ge);
801 }
802 }
803 }
804
805 /** loader_dri3_swap_buffers_msc
806 *
807 * Make the current back buffer visible using the present extension
808 */
809 int64_t
810 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
811 int64_t target_msc, int64_t divisor,
812 int64_t remainder, unsigned flush_flags,
813 bool force_copy)
814 {
815 struct loader_dri3_buffer *back;
816 int64_t ret = 0;
817 uint32_t options = XCB_PRESENT_OPTION_NONE;
818
819 draw->vtable->flush_drawable(draw, flush_flags);
820
821 back = dri3_find_back_alloc(draw);
822
823 mtx_lock(&draw->mtx);
824 if (draw->is_different_gpu && back) {
825 /* Update the linear buffer before presenting the pixmap */
826 (void) loader_dri3_blit_image(draw,
827 back->linear_buffer,
828 back->image,
829 0, 0, back->width, back->height,
830 0, 0, __BLIT_FLAG_FLUSH);
831 }
832
833 /* If we need to preload the new back buffer, remember the source.
834 * The force_copy parameter is used by EGL to attempt to preserve
835 * the back buffer across a call to this function.
836 */
837 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
838 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
839
840 /* Exchange the back and fake front. Even though the server knows about these
841 * buffers, it has no notion of back and fake front.
842 */
843 if (back && draw->have_fake_front) {
844 struct loader_dri3_buffer *tmp;
845
846 tmp = dri3_fake_front_buffer(draw);
847 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
848 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
849
850 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
851 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
852 }
853
854 dri3_flush_present_events(draw);
855
856 if (back && !draw->is_pixmap) {
857 dri3_fence_reset(draw->conn, back);
858
859 /* Compute when we want the frame shown by taking the last known
860 * successful MSC and adding in a swap interval for each outstanding swap
861 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
862 * semantic"
863 */
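      /* Worked example (illustrative): with draw->msc == 100, swap_interval == 1,
       * and, after the increment below, send_sbc == 12 while recv_sbc == 10,
       * the default target becomes 100 + 1 * (12 - 10) = 102.
       */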
864 ++draw->send_sbc;
865 if (target_msc == 0 && divisor == 0 && remainder == 0)
866 target_msc = draw->msc + draw->swap_interval *
867 (draw->send_sbc - draw->recv_sbc);
868 else if (divisor == 0 && remainder > 0) {
869 /* From the GLX_OML_sync_control spec:
870 * "If <divisor> = 0, the swap will occur when MSC becomes
871 * greater than or equal to <target_msc>."
872 *
873 * Note that there's no mention of the remainder. The Present
874 * extension throws BadValue for remainder != 0 with divisor == 0, so
875 * just drop the passed in value.
876 */
877 remainder = 0;
878 }
879
880 /* From the GLX_EXT_swap_control spec
881 * and the EGL 1.4 spec (page 53):
882 *
883 * "If <interval> is set to a value of 0, buffer swaps are not
884 * synchronized to a video frame."
885 *
886 * Implementation note: It is possible to enable triple buffering
887 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
888 * the default.
889 */
890 if (draw->swap_interval == 0)
891 options |= XCB_PRESENT_OPTION_ASYNC;
892
893       /* If we need to populate the new back buffer but must reuse its
894        * buffer slot due to lack of local blit capabilities, make sure
895        * the server copies instead of flipping, so we don't deadlock.
896        */
897 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
898 options |= XCB_PRESENT_OPTION_COPY;
899 #ifdef HAVE_DRI3_MODIFIERS
900 if (draw->multiplanes_available)
901 options |= XCB_PRESENT_OPTION_SUBOPTIMAL;
902 #endif
903 back->busy = 1;
904 back->last_swap = draw->send_sbc;
905 xcb_present_pixmap(draw->conn,
906 draw->drawable,
907 back->pixmap,
908 (uint32_t) draw->send_sbc,
909 0, /* valid */
910 0, /* update */
911 0, /* x_off */
912 0, /* y_off */
913 None, /* target_crtc */
914 None,
915 back->sync_fence,
916 options,
917 target_msc,
918 divisor,
919 remainder, 0, NULL);
920 ret = (int64_t) draw->send_sbc;
921
922 /* Schedule a server-side back-preserving blit if necessary.
923 * This happens iff all conditions below are satisfied:
924 * a) We have a fake front,
925 * b) We need to preserve the back buffer,
926 * c) We don't have local blit capabilities.
927 */
928 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
929 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
930 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
931 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
932
933 dri3_fence_reset(draw->conn, new_back);
934 dri3_copy_area(draw->conn, src->pixmap,
935 new_back->pixmap,
936 dri3_drawable_gc(draw),
937 0, 0, 0, 0, draw->width, draw->height);
938 dri3_fence_trigger(draw->conn, new_back);
939 new_back->last_swap = src->last_swap;
940 }
941
942 xcb_flush(draw->conn);
943 if (draw->stamp)
944 ++(*draw->stamp);
945 }
946 mtx_unlock(&draw->mtx);
947
948 draw->ext->flush->invalidate(draw->dri_drawable);
949
950 return ret;
951 }
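
/* Caller sketch (illustrative, not part of this file): a plain
 * glXSwapBuffers()/eglSwapBuffers()-style path passes zeros and lets the
 * helper pick the target MSC; flush_flags are whatever the caller needs:
 *
 *    int64_t sbc = loader_dri3_swap_buffers_msc(draw, 0, 0, 0,
 *                                               flush_flags, false);
 *    // sbc is the swap buffer count of the queued present, or 0 if
 *    // nothing was presented (e.g. for a pixmap drawable).
 */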
952
953 int
954 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
955 {
956 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
957 int ret;
958
959 mtx_lock(&draw->mtx);
960 ret = (!back || back->last_swap == 0) ? 0 :
961 draw->send_sbc - back->last_swap + 1;
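   /* Illustrative example: a back buffer last presented at SBC 10, with
    * send_sbc now at 12, reports an age of 12 - 10 + 1 = 3 frames. */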
962 mtx_unlock(&draw->mtx);
963
964 return ret;
965 }
966
967 /** loader_dri3_open
968 *
969 * Wrapper around xcb_dri3_open
970 */
971 int
972 loader_dri3_open(xcb_connection_t *conn,
973 xcb_window_t root,
974 uint32_t provider)
975 {
976 xcb_dri3_open_cookie_t cookie;
977 xcb_dri3_open_reply_t *reply;
978 int fd;
979
980 cookie = xcb_dri3_open(conn,
981 root,
982 provider);
983
984 reply = xcb_dri3_open_reply(conn, cookie, NULL);
985 if (!reply)
986 return -1;
987
988 if (reply->nfd != 1) {
989 free(reply);
990 return -1;
991 }
992
993 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
994 free(reply);
995 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
996
997 return fd;
998 }
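
/* Call-site sketch (illustrative; "conn" and "root" are assumed to come from
 * the caller's X setup, and None is assumed to select the default provider):
 *
 *    int fd = loader_dri3_open(conn, root, None);
 *    if (fd < 0)
 *       return false;    // hypothetical fallback path in the caller
 */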
999
1000 static uint32_t
1001 dri3_cpp_for_format(uint32_t format) {
1002 switch (format) {
1003 case __DRI_IMAGE_FORMAT_R8:
1004 return 1;
1005 case __DRI_IMAGE_FORMAT_RGB565:
1006 case __DRI_IMAGE_FORMAT_GR88:
1007 return 2;
1008 case __DRI_IMAGE_FORMAT_XRGB8888:
1009 case __DRI_IMAGE_FORMAT_ARGB8888:
1010 case __DRI_IMAGE_FORMAT_ABGR8888:
1011 case __DRI_IMAGE_FORMAT_XBGR8888:
1012 case __DRI_IMAGE_FORMAT_XRGB2101010:
1013 case __DRI_IMAGE_FORMAT_ARGB2101010:
1014 case __DRI_IMAGE_FORMAT_XBGR2101010:
1015 case __DRI_IMAGE_FORMAT_ABGR2101010:
1016 case __DRI_IMAGE_FORMAT_SARGB8:
1017 case __DRI_IMAGE_FORMAT_SABGR8:
1018 return 4;
1019 case __DRI_IMAGE_FORMAT_NONE:
1020 default:
1021 return 0;
1022 }
1023 }
1024
1025 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1026 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1027 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1028 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1029 */
1030 static int
1031 image_format_to_fourcc(int format)
1032 {
1033
1034 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1035 switch (format) {
1036 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1037 case __DRI_IMAGE_FORMAT_SABGR8: return __DRI_IMAGE_FOURCC_SABGR8888;
1038 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1039 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1040 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1041 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1042 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1043 case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
1044 case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
1045 case __DRI_IMAGE_FORMAT_XBGR2101010: return __DRI_IMAGE_FOURCC_XBGR2101010;
1046 case __DRI_IMAGE_FORMAT_ABGR2101010: return __DRI_IMAGE_FOURCC_ABGR2101010;
1047 }
1048 return 0;
1049 }
1050
1051 #ifdef HAVE_DRI3_MODIFIERS
1052 static bool
1053 has_supported_modifier(struct loader_dri3_drawable *draw, unsigned int format,
1054 uint64_t *modifiers, uint32_t count)
1055 {
1056 uint64_t *supported_modifiers;
1057 int32_t supported_modifiers_count;
1058 bool found = false;
1059 int i, j;
1060
1061 if (!draw->ext->image->queryDmaBufModifiers(draw->dri_screen,
1062 format, 0, NULL, NULL,
1063 &supported_modifiers_count) ||
1064 supported_modifiers_count == 0)
1065 return false;
1066
1067 supported_modifiers = malloc(supported_modifiers_count * sizeof(uint64_t));
1068 if (!supported_modifiers)
1069 return false;
1070
1071 draw->ext->image->queryDmaBufModifiers(draw->dri_screen, format,
1072 supported_modifiers_count,
1073 supported_modifiers, NULL,
1074 &supported_modifiers_count);
1075
1076 for (i = 0; !found && i < supported_modifiers_count; i++) {
1077 for (j = 0; !found && j < count; j++) {
1078 if (supported_modifiers[i] == modifiers[j])
1079 found = true;
1080 }
1081 }
1082
1083 free(supported_modifiers);
1084 return found;
1085 }
1086 #endif
1087
1088 /** dri3_alloc_render_buffer
1089 *
1090 * Use the driver createImage function to construct a __DRIimage, then
1091 * get a file descriptor for that and create an X pixmap from that
1092 *
1093 * Allocate an xshmfence for synchronization
1094 */
1095 static struct loader_dri3_buffer *
1096 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
1097 int width, int height, int depth)
1098 {
1099 struct loader_dri3_buffer *buffer;
1100 __DRIimage *pixmap_buffer;
1101 xcb_pixmap_t pixmap;
1102 xcb_sync_fence_t sync_fence;
1103 struct xshmfence *shm_fence;
1104 int buffer_fds[4], fence_fd;
1105 int num_planes = 0;
1106 int i, mod;
1107 int ret;
1108
1109 /* Create an xshmfence object and
1110 * prepare to send that to the X server
1111 */
1112
1113 fence_fd = xshmfence_alloc_shm();
1114 if (fence_fd < 0)
1115 return NULL;
1116
1117 shm_fence = xshmfence_map_shm(fence_fd);
1118 if (shm_fence == NULL)
1119 goto no_shm_fence;
1120
1121 /* Allocate the image from the driver
1122 */
1123 buffer = calloc(1, sizeof *buffer);
1124 if (!buffer)
1125 goto no_buffer;
1126
1127 buffer->cpp = dri3_cpp_for_format(format);
1128 if (!buffer->cpp)
1129 goto no_image;
1130
1131 if (!draw->is_different_gpu) {
1132 #ifdef HAVE_DRI3_MODIFIERS
1133 if (draw->multiplanes_available &&
1134 draw->ext->image->base.version >= 15 &&
1135 draw->ext->image->queryDmaBufModifiers &&
1136 draw->ext->image->createImageWithModifiers) {
1137 xcb_dri3_get_supported_modifiers_cookie_t mod_cookie;
1138 xcb_dri3_get_supported_modifiers_reply_t *mod_reply;
1139 xcb_generic_error_t *error = NULL;
1140 uint64_t *modifiers = NULL;
1141 uint32_t count = 0;
1142
1143 mod_cookie = xcb_dri3_get_supported_modifiers(draw->conn,
1144 draw->drawable,
1145 depth, buffer->cpp * 8);
1146 mod_reply = xcb_dri3_get_supported_modifiers_reply(draw->conn,
1147 mod_cookie,
1148 &error);
1149 if (!mod_reply)
1150 goto no_image;
1151
1152 if (mod_reply->num_window_modifiers) {
1153 count = mod_reply->num_window_modifiers;
1154 modifiers = malloc(count * sizeof(uint64_t));
1155 if (!modifiers) {
1156 free(mod_reply);
1157 goto no_image;
1158 }
1159
1160 memcpy(modifiers,
1161 xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply),
1162 count * sizeof(uint64_t));
1163
1164 if (!has_supported_modifier(draw, image_format_to_fourcc(format),
1165 modifiers, count)) {
1166 free(modifiers);
1167 count = 0;
1168 modifiers = NULL;
1169 }
1170 }
1171
1172 if (mod_reply->num_screen_modifiers && modifiers == NULL) {
1173 count = mod_reply->num_screen_modifiers;
1174 modifiers = malloc(count * sizeof(uint64_t));
1175 if (!modifiers) {
1176 free(modifiers);
1177 free(mod_reply);
1178 goto no_image;
1179 }
1180
1181 memcpy(modifiers,
1182 xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply),
1183 count * sizeof(uint64_t));
1184 }
1185
1186 free(mod_reply);
1187
1188 buffer->image = draw->ext->image->createImageWithModifiers(draw->dri_screen,
1189 width, height,
1190 format,
1191 modifiers,
1192 count,
1193 buffer);
1194 free(modifiers);
1195 }
1196 #endif
1197 if (!buffer->image)
1198 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1199 width, height,
1200 format,
1201 __DRI_IMAGE_USE_SHARE |
1202 __DRI_IMAGE_USE_SCANOUT |
1203 __DRI_IMAGE_USE_BACKBUFFER,
1204 buffer);
1205
1206 pixmap_buffer = buffer->image;
1207
1208 if (!buffer->image)
1209 goto no_image;
1210 } else {
1211 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1212 width, height,
1213 format,
1214 0,
1215 buffer);
1216
1217 if (!buffer->image)
1218 goto no_image;
1219
1220 buffer->linear_buffer =
1221 draw->ext->image->createImage(draw->dri_screen,
1222 width, height, format,
1223 __DRI_IMAGE_USE_SHARE |
1224 __DRI_IMAGE_USE_LINEAR |
1225 __DRI_IMAGE_USE_BACKBUFFER,
1226 buffer);
1227 pixmap_buffer = buffer->linear_buffer;
1228
1229 if (!buffer->linear_buffer)
1230 goto no_linear_buffer;
1231 }
1232
1233    /* X wants some information about the planes, so ask the image for it
1234 */
1235 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_NUM_PLANES,
1236 &num_planes))
1237 num_planes = 1;
1238
1239 for (i = 0; i < num_planes; i++) {
1240 __DRIimage *image = draw->ext->image->fromPlanar(pixmap_buffer, i, NULL);
1241
1242 if (!image) {
1243 assert(i == 0);
1244 image = pixmap_buffer;
1245 }
1246
1247 ret = draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_FD,
1248 &buffer_fds[i]);
1249 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_STRIDE,
1250 &buffer->strides[i]);
1251 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_OFFSET,
1252 &buffer->offsets[i]);
1253 if (image != pixmap_buffer)
1254 draw->ext->image->destroyImage(image);
1255
1256 if (!ret)
1257 goto no_buffer_attrib;
1258 }
1259
1260 ret = draw->ext->image->queryImage(pixmap_buffer,
1261 __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod);
1262 buffer->modifier = (uint64_t) mod << 32;
1263 ret &= draw->ext->image->queryImage(pixmap_buffer,
1264 __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod);
1265 buffer->modifier |= (uint64_t)(mod & 0xffffffff);
1266
1267 if (!ret)
1268 buffer->modifier = DRM_FORMAT_MOD_INVALID;
1269
1270 pixmap = xcb_generate_id(draw->conn);
1271 #ifdef HAVE_DRI3_MODIFIERS
1272 if (draw->multiplanes_available &&
1273 buffer->modifier != DRM_FORMAT_MOD_INVALID) {
1274 xcb_dri3_pixmap_from_buffers(draw->conn,
1275 pixmap,
1276 draw->drawable,
1277 num_planes,
1278 width, height,
1279 buffer->strides[0], buffer->offsets[0],
1280 buffer->strides[1], buffer->offsets[1],
1281 buffer->strides[2], buffer->offsets[2],
1282 buffer->strides[3], buffer->offsets[3],
1283 depth, buffer->cpp * 8,
1284 buffer->modifier,
1285 buffer_fds);
1286 } else
1287 #endif
1288 {
1289 xcb_dri3_pixmap_from_buffer(draw->conn,
1290 pixmap,
1291 draw->drawable,
1292 buffer->size,
1293 width, height, buffer->strides[0],
1294 depth, buffer->cpp * 8,
1295 buffer_fds[0]);
1296 }
1297
1298 xcb_dri3_fence_from_fd(draw->conn,
1299 pixmap,
1300 (sync_fence = xcb_generate_id(draw->conn)),
1301 false,
1302 fence_fd);
1303
1304 buffer->pixmap = pixmap;
1305 buffer->own_pixmap = true;
1306 buffer->sync_fence = sync_fence;
1307 buffer->shm_fence = shm_fence;
1308 buffer->width = width;
1309 buffer->height = height;
1310
1311 /* Mark the buffer as idle
1312 */
1313 dri3_fence_set(buffer);
1314
1315 return buffer;
1316
1317 no_buffer_attrib:
1318 do {
1319 close(buffer_fds[i]);
1320 } while (--i >= 0);
1321 draw->ext->image->destroyImage(pixmap_buffer);
1322 no_linear_buffer:
1323 if (draw->is_different_gpu)
1324 draw->ext->image->destroyImage(buffer->image);
1325 no_image:
1326 free(buffer);
1327 no_buffer:
1328 xshmfence_unmap_shm(shm_fence);
1329 no_shm_fence:
1330 close(fence_fd);
1331 return NULL;
1332 }
1333
1334 /** dri3_update_drawable
1335 *
1336 * Called the first time we use the drawable and then
1337 * after we receive present configure notify events to
1338 * track the geometry of the drawable
1339 */
1340 static int
1341 dri3_update_drawable(__DRIdrawable *driDrawable,
1342 struct loader_dri3_drawable *draw)
1343 {
1344 mtx_lock(&draw->mtx);
1345 if (draw->first_init) {
1346 xcb_get_geometry_cookie_t geom_cookie;
1347 xcb_get_geometry_reply_t *geom_reply;
1348 xcb_void_cookie_t cookie;
1349 xcb_generic_error_t *error;
1350 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1351 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1352
1353 draw->first_init = false;
1354
1355 /* Try to select for input on the window.
1356 *
1357 * If the drawable is a window, this will get our events
1358 * delivered.
1359 *
1360 * Otherwise, we'll get a BadWindow error back from this request which
1361 * will let us know that the drawable is a pixmap instead.
1362 */
1363
1364 draw->eid = xcb_generate_id(draw->conn);
1365 cookie =
1366 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1367 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1368 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1369 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1370
1371 present_capabilities_cookie =
1372 xcb_present_query_capabilities(draw->conn, draw->drawable);
1373
1374 /* Create an XCB event queue to hold present events outside of the usual
1375 * application event queue
1376 */
1377 draw->special_event = xcb_register_for_special_xge(draw->conn,
1378 &xcb_present_id,
1379 draw->eid,
1380 draw->stamp);
1381 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1382
1383 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1384
1385 if (!geom_reply) {
1386 mtx_unlock(&draw->mtx);
1387 return false;
1388 }
1389
1390 draw->width = geom_reply->width;
1391 draw->height = geom_reply->height;
1392 draw->depth = geom_reply->depth;
1393 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1394
1395 free(geom_reply);
1396
1397 draw->is_pixmap = false;
1398
1399 /* Check to see if our select input call failed. If it failed with a
1400 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1401 * special event queue created above and mark the drawable as a pixmap
1402 */
1403
1404 error = xcb_request_check(draw->conn, cookie);
1405
1406 present_capabilities_reply =
1407 xcb_present_query_capabilities_reply(draw->conn,
1408 present_capabilities_cookie,
1409 NULL);
1410
1411 if (present_capabilities_reply) {
1412 draw->present_capabilities = present_capabilities_reply->capabilities;
1413 free(present_capabilities_reply);
1414 } else
1415 draw->present_capabilities = 0;
1416
1417 if (error) {
1418 if (error->error_code != BadWindow) {
1419 free(error);
1420 mtx_unlock(&draw->mtx);
1421 return false;
1422 }
1423 draw->is_pixmap = true;
1424 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1425 draw->special_event = NULL;
1426 }
1427 }
1428 dri3_flush_present_events(draw);
1429 mtx_unlock(&draw->mtx);
1430 return true;
1431 }
1432
1433 __DRIimage *
1434 loader_dri3_create_image(xcb_connection_t *c,
1435 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1436 unsigned int format,
1437 __DRIscreen *dri_screen,
1438 const __DRIimageExtension *image,
1439 void *loaderPrivate)
1440 {
1441 int *fds;
1442 __DRIimage *image_planar, *ret;
1443 int stride, offset;
1444
1445 /* Get an FD for the pixmap object
1446 */
1447 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1448
1449 stride = bp_reply->stride;
1450 offset = 0;
1451
1452 /* createImageFromFds creates a wrapper __DRIimage structure which
1453     * can deal with multiple planes for things like YUV images. So, once
1454 * we've gotten the planar wrapper, pull the single plane out of it and
1455 * discard the wrapper.
1456 */
1457 image_planar = image->createImageFromFds(dri_screen,
1458 bp_reply->width,
1459 bp_reply->height,
1460 image_format_to_fourcc(format),
1461 fds, 1,
1462 &stride, &offset, loaderPrivate);
1463 close(fds[0]);
1464 if (!image_planar)
1465 return NULL;
1466
1467 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1468
1469 if (!ret)
1470 ret = image_planar;
1471 else
1472 image->destroyImage(image_planar);
1473
1474 return ret;
1475 }
1476
1477 #ifdef HAVE_DRI3_MODIFIERS
1478 __DRIimage *
1479 loader_dri3_create_image_from_buffers(xcb_connection_t *c,
1480 xcb_dri3_buffers_from_pixmap_reply_t *bp_reply,
1481 unsigned int format,
1482 __DRIscreen *dri_screen,
1483 const __DRIimageExtension *image,
1484 void *loaderPrivate)
1485 {
1486 __DRIimage *ret;
1487 int *fds;
1488 uint32_t *strides_in, *offsets_in;
1489 int strides[4], offsets[4];
1490 unsigned error;
1491 int i;
1492
1493 if (bp_reply->nfd > 4)
1494 return NULL;
1495
1496 fds = xcb_dri3_buffers_from_pixmap_reply_fds(c, bp_reply);
1497 strides_in = xcb_dri3_buffers_from_pixmap_strides(bp_reply);
1498 offsets_in = xcb_dri3_buffers_from_pixmap_offsets(bp_reply);
1499 for (i = 0; i < bp_reply->nfd; i++) {
1500 strides[i] = strides_in[i];
1501 offsets[i] = offsets_in[i];
1502 }
1503
1504 ret = image->createImageFromDmaBufs2(dri_screen,
1505 bp_reply->width,
1506 bp_reply->height,
1507 image_format_to_fourcc(format),
1508 bp_reply->modifier,
1509 fds, bp_reply->nfd,
1510 strides, offsets,
1511 0, 0, 0, 0, /* UNDEFINED */
1512 &error, loaderPrivate);
1513
1514 for (i = 0; i < bp_reply->nfd; i++)
1515 close(fds[i]);
1516
1517 return ret;
1518 }
1519 #endif
1520
1521 /** dri3_get_pixmap_buffer
1522 *
1523 * Get the DRM object for a pixmap from the X server and
1524 * wrap that with a __DRIimage structure using createImageFromFds
1525 */
1526 static struct loader_dri3_buffer *
1527 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1528 enum loader_dri3_buffer_type buffer_type,
1529 struct loader_dri3_drawable *draw)
1530 {
1531 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1532 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1533 xcb_drawable_t pixmap;
1534 xcb_sync_fence_t sync_fence;
1535 struct xshmfence *shm_fence;
1536 int width;
1537 int height;
1538 int fence_fd;
1539 __DRIscreen *cur_screen;
1540
1541 if (buffer)
1542 return buffer;
1543
1544 pixmap = draw->drawable;
1545
1546 buffer = calloc(1, sizeof *buffer);
1547 if (!buffer)
1548 goto no_buffer;
1549
1550 fence_fd = xshmfence_alloc_shm();
1551 if (fence_fd < 0)
1552 goto no_fence;
1553 shm_fence = xshmfence_map_shm(fence_fd);
1554 if (shm_fence == NULL) {
1555 close (fence_fd);
1556 goto no_fence;
1557 }
1558
1559 /* Get the currently-bound screen or revert to using the drawable's screen if
1560 * no contexts are currently bound. The latter case is at least necessary for
1561 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1562 */
1563 cur_screen = draw->vtable->get_dri_screen();
1564 if (!cur_screen) {
1565 cur_screen = draw->dri_screen;
1566 }
1567
1568 xcb_dri3_fence_from_fd(draw->conn,
1569 pixmap,
1570 (sync_fence = xcb_generate_id(draw->conn)),
1571 false,
1572 fence_fd);
1573 #ifdef HAVE_DRI3_MODIFIERS
1574 if (draw->multiplanes_available &&
1575 draw->ext->image->base.version >= 15 &&
1576 draw->ext->image->createImageFromDmaBufs2) {
1577 xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie;
1578 xcb_dri3_buffers_from_pixmap_reply_t *bps_reply;
1579
1580 bps_cookie = xcb_dri3_buffers_from_pixmap(draw->conn, pixmap);
1581 bps_reply = xcb_dri3_buffers_from_pixmap_reply(draw->conn, bps_cookie,
1582 NULL);
1583 if (!bps_reply)
1584 goto no_image;
1585 buffer->image =
1586 loader_dri3_create_image_from_buffers(draw->conn, bps_reply, format,
1587 cur_screen, draw->ext->image,
1588 buffer);
1589 width = bps_reply->width;
1590 height = bps_reply->height;
1591 free(bps_reply);
1592 } else
1593 #endif
1594 {
1595 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1596 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1597
1598 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1599 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1600 if (!bp_reply)
1601 goto no_image;
1602
1603 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1604 cur_screen, draw->ext->image,
1605 buffer);
1606 width = bp_reply->width;
1607 height = bp_reply->height;
1608 free(bp_reply);
1609 }
1610
1611 if (!buffer->image)
1612 goto no_image;
1613
1614 buffer->pixmap = pixmap;
1615 buffer->own_pixmap = false;
1616 buffer->width = width;
1617 buffer->height = height;
1618 buffer->shm_fence = shm_fence;
1619 buffer->sync_fence = sync_fence;
1620
1621 draw->buffers[buf_id] = buffer;
1622
1623 return buffer;
1624
1625 no_image:
1626 xcb_sync_destroy_fence(draw->conn, sync_fence);
1627 xshmfence_unmap_shm(shm_fence);
1628 no_fence:
1629 free(buffer);
1630 no_buffer:
1631 return NULL;
1632 }
1633
1634 /** dri3_get_buffer
1635 *
1636 * Find a front or back buffer, allocating new ones as necessary
1637 */
1638 static struct loader_dri3_buffer *
1639 dri3_get_buffer(__DRIdrawable *driDrawable,
1640 unsigned int format,
1641 enum loader_dri3_buffer_type buffer_type,
1642 struct loader_dri3_drawable *draw)
1643 {
1644 struct loader_dri3_buffer *buffer;
1645 int buf_id;
1646
1647 if (buffer_type == loader_dri3_buffer_back) {
1648 draw->back_format = format;
1649
1650 buf_id = dri3_find_back(draw);
1651
1652 if (buf_id < 0)
1653 return NULL;
1654 } else {
1655 buf_id = LOADER_DRI3_FRONT_ID;
1656 }
1657
1658 buffer = draw->buffers[buf_id];
1659
1660 /* Allocate a new buffer if there isn't an old one, if that
1661 * old one is the wrong size, or if it's suboptimal
1662 */
1663 if (!buffer || buffer->width != draw->width ||
1664 buffer->height != draw->height ||
1665 buffer->reallocate) {
1666 struct loader_dri3_buffer *new_buffer;
1667
1668 /* Allocate the new buffers
1669 */
1670 new_buffer = dri3_alloc_render_buffer(draw,
1671 format,
1672 draw->width,
1673 draw->height,
1674 draw->depth);
1675 if (!new_buffer)
1676 return NULL;
1677
1678 /* When resizing, copy the contents of the old buffer, waiting for that
1679 * copy to complete using our fences before proceeding
1680 */
1681 if ((buffer_type == loader_dri3_buffer_back ||
1682 (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
1683 && buffer) {
1684
1685 /* Fill the new buffer with data from an old buffer */
1686 dri3_fence_await(draw->conn, draw, buffer);
1687 if (!loader_dri3_blit_image(draw,
1688 new_buffer->image,
1689 buffer->image,
1690 0, 0, draw->width, draw->height,
1691 0, 0, 0) &&
1692 !buffer->linear_buffer) {
1693 dri3_fence_reset(draw->conn, new_buffer);
1694 dri3_copy_area(draw->conn,
1695 buffer->pixmap,
1696 new_buffer->pixmap,
1697 dri3_drawable_gc(draw),
1698 0, 0, 0, 0,
1699 draw->width, draw->height);
1700 dri3_fence_trigger(draw->conn, new_buffer);
1701 }
1702 dri3_free_render_buffer(draw, buffer);
1703 } else if (buffer_type == loader_dri3_buffer_front) {
1704 /* Fill the new fake front with data from a real front */
1705 loader_dri3_swapbuffer_barrier(draw);
1706 dri3_fence_reset(draw->conn, new_buffer);
1707 dri3_copy_area(draw->conn,
1708 draw->drawable,
1709 new_buffer->pixmap,
1710 dri3_drawable_gc(draw),
1711 0, 0, 0, 0,
1712 draw->width, draw->height);
1713 dri3_fence_trigger(draw->conn, new_buffer);
1714
1715 if (new_buffer->linear_buffer) {
1716 dri3_fence_await(draw->conn, draw, new_buffer);
1717 (void) loader_dri3_blit_image(draw,
1718 new_buffer->image,
1719 new_buffer->linear_buffer,
1720 0, 0, draw->width, draw->height,
1721 0, 0, 0);
1722 }
1723 }
1724 buffer = new_buffer;
1725 draw->buffers[buf_id] = buffer;
1726 }
1727 dri3_fence_await(draw->conn, draw, buffer);
1728
1729 /*
1730 * Do we need to preserve the content of a previous buffer?
1731 *
1732 * Note that this blit is needed only to avoid a wait for a buffer that
1733 * is currently in the flip chain or being scanned out from. That's really
1734 * a tradeoff. If we're ok with the wait we can reduce the number of back
1735 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1736 * but in the latter case we must disallow page-flipping.
1737 */
1738 if (buffer_type == loader_dri3_buffer_back &&
1739 draw->cur_blit_source != -1 &&
1740 draw->buffers[draw->cur_blit_source] &&
1741 buffer != draw->buffers[draw->cur_blit_source]) {
1742
1743 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1744
1745       /* Avoid flushing here. This will probably help tiling hardware. */
1746 (void) loader_dri3_blit_image(draw,
1747 buffer->image,
1748 source->image,
1749 0, 0, draw->width, draw->height,
1750 0, 0, 0);
1751 buffer->last_swap = source->last_swap;
1752 draw->cur_blit_source = -1;
1753 }
1754 /* Return the requested buffer */
1755 return buffer;
1756 }
1757
1758 /** dri3_free_buffers
1759 *
1760  * Free the front buffer or all of the back buffers. Used
1761 * when the application changes which buffers it needs
1762 */
1763 static void
1764 dri3_free_buffers(__DRIdrawable *driDrawable,
1765 enum loader_dri3_buffer_type buffer_type,
1766 struct loader_dri3_drawable *draw)
1767 {
1768 struct loader_dri3_buffer *buffer;
1769 int first_id;
1770 int n_id;
1771 int buf_id;
1772
1773 switch (buffer_type) {
1774 case loader_dri3_buffer_back:
1775 first_id = LOADER_DRI3_BACK_ID(0);
1776 n_id = LOADER_DRI3_MAX_BACK;
1777 draw->cur_blit_source = -1;
1778 break;
1779 case loader_dri3_buffer_front:
1780 first_id = LOADER_DRI3_FRONT_ID;
1781 /* Don't free a fake front holding new backbuffer content. */
1782 n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
1783 }
1784
1785 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1786 buffer = draw->buffers[buf_id];
1787 if (buffer) {
1788 dri3_free_render_buffer(draw, buffer);
1789 draw->buffers[buf_id] = NULL;
1790 }
1791 }
1792 }
1793
1794 /** loader_dri3_get_buffers
1795 *
1796 * The published buffer allocation API.
1797 * Returns all of the necessary buffers, allocating
1798 * as needed.
1799 */
1800 int
1801 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1802 unsigned int format,
1803 uint32_t *stamp,
1804 void *loaderPrivate,
1805 uint32_t buffer_mask,
1806 struct __DRIimageList *buffers)
1807 {
1808 struct loader_dri3_drawable *draw = loaderPrivate;
1809 struct loader_dri3_buffer *front, *back;
1810 int buf_id;
1811
1812 buffers->image_mask = 0;
1813 buffers->front = NULL;
1814 buffers->back = NULL;
1815
1816 front = NULL;
1817 back = NULL;
1818
1819 if (!dri3_update_drawable(driDrawable, draw))
1820 return false;
1821
1822 dri3_update_num_back(draw);
1823
1824 /* Free no longer needed back buffers */
1825 for (buf_id = draw->num_back; buf_id < LOADER_DRI3_MAX_BACK; buf_id++) {
1826 if (draw->cur_blit_source != buf_id && draw->buffers[buf_id]) {
1827 dri3_free_render_buffer(draw, draw->buffers[buf_id]);
1828 draw->buffers[buf_id] = NULL;
1829 }
1830 }
1831
1832    /* Pixmaps always have front buffers.
1833 * Exchange swaps also mandate fake front buffers.
1834 */
1835 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1836 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1837
1838 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1839       /* All pixmaps are owned by the server GPU.
1840        * When we use a different GPU, we can't use the pixmap
1841        * as a buffer since it is potentially tiled in a way
1842        * our device can't understand. In this case, use
1843        * a fake front buffer. Hopefully the pixmap
1844        * content will get synced with the fake front
1845        * buffer.
1846        */
1847 if (draw->is_pixmap && !draw->is_different_gpu)
1848 front = dri3_get_pixmap_buffer(driDrawable,
1849 format,
1850 loader_dri3_buffer_front,
1851 draw);
1852 else
1853 front = dri3_get_buffer(driDrawable,
1854 format,
1855 loader_dri3_buffer_front,
1856 draw);
1857
1858 if (!front)
1859 return false;
1860 } else {
1861 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1862 draw->have_fake_front = 0;
1863 }
1864
1865 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1866 back = dri3_get_buffer(driDrawable,
1867 format,
1868 loader_dri3_buffer_back,
1869 draw);
1870 if (!back)
1871 return false;
1872 draw->have_back = 1;
1873 } else {
1874 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1875 draw->have_back = 0;
1876 }
1877
1878 if (front) {
1879 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1880 buffers->front = front->image;
1881 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1882 }
1883
1884 if (back) {
1885 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1886 buffers->back = back->image;
1887 }
1888
1889 draw->stamp = stamp;
1890
1891 return true;
1892 }
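
/* Wiring sketch (illustrative, not part of this file): GLX/EGL backends
 * typically hand this entry point to the driver via the DRI image loader
 * extension, roughly:
 *
 *    static const __DRIimageLoaderExtension imageLoaderExtension = {
 *       .base             = { __DRI_IMAGE_LOADER, 1 },
 *       .getBuffers       = loader_dri3_get_buffers,
 *       .flushFrontBuffer = ...,   // backend-specific front-buffer flush
 *    };
 */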
1893
1894 /** loader_dri3_update_drawable_geometry
1895 *
1896 * Get the current drawable geometry.
1897 */
1898 void
1899 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1900 {
1901 xcb_get_geometry_cookie_t geom_cookie;
1902 xcb_get_geometry_reply_t *geom_reply;
1903
1904 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1905
1906 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1907
1908 if (geom_reply) {
1909 draw->width = geom_reply->width;
1910 draw->height = geom_reply->height;
1911 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1912 draw->ext->flush->invalidate(draw->dri_drawable);
1913
1914 free(geom_reply);
1915 }
1916 }
1917
1918
1919 /**
1920 * Make sure the server has flushed all pending swap buffers to hardware
1921 * for this drawable. Ideally we'd want to send an X protocol request to
1922 * have the server block our connection until the swaps are complete. That
1923 * would avoid the potential round-trip here.
1924 */
1925 void
1926 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1927 {
1928 int64_t ust, msc, sbc;
1929
1930 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1931 }
1932
1933 /**
1934 * Perform any cleanup associated with a close screen operation.
1935 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1936 *
1937 * This function destroys the screen's cached swap context if any.
1938 */
1939 void
1940 loader_dri3_close_screen(__DRIscreen *dri_screen)
1941 {
1942 mtx_lock(&blit_context.mtx);
1943 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1944 blit_context.core->destroyContext(blit_context.ctx);
1945 blit_context.ctx = NULL;
1946 }
1947 mtx_unlock(&blit_context.mtx);
1948 }
1949
1950 /**
1951  * Find a back buffer slot - potentially allocating a back buffer
1952  *
1953  * \param draw[in,out]  Pointer to the drawable for which to find a back buffer.
1954  * \return Pointer to a new back buffer or NULL if allocation failed or was
1955  * not mandated.
1956  *
1957  * Find a (potentially new) back buffer; if it hasn't been allocated yet and
1958  * needs initializing, try to allocate and initialize it.
1959 */
1961 static struct loader_dri3_buffer *
1962 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1963 {
1964 struct loader_dri3_buffer *back;
1965 int id;
1966
1967 id = dri3_find_back(draw);
1968 if (id < 0)
1969 return NULL;
1970
1971 back = draw->buffers[id];
1972 /* Allocate a new back if we haven't got one */
1973 if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
1974 dri3_update_drawable(draw->dri_drawable, draw))
1975 back = dri3_alloc_render_buffer(draw, draw->back_format,
1976 draw->width, draw->height, draw->depth);
1977
1978 if (!back)
1979 return NULL;
1980
1981 draw->buffers[id] = back;
1982
1983 /* If necessary, prefill the back with data according to swap_method mode. */
1984 if (draw->cur_blit_source != -1 &&
1985 draw->buffers[draw->cur_blit_source] &&
1986 back != draw->buffers[draw->cur_blit_source]) {
1987 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1988
1989 dri3_fence_await(draw->conn, draw, source);
1990 dri3_fence_await(draw->conn, draw, back);
1991 (void) loader_dri3_blit_image(draw,
1992 back->image,
1993 source->image,
1994 0, 0, draw->width, draw->height,
1995 0, 0, 0);
1996 back->last_swap = source->last_swap;
1997 draw->cur_blit_source = -1;
1998 }
1999
2000 return back;
2001 }