7cd6b1e8ab66d7ed8db5994bd4958ad3a656af41
[mesa.git] src/loader/loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27 #include <string.h>
28
29 #include <X11/xshmfence.h>
30 #include <xcb/xcb.h>
31 #include <xcb/dri3.h>
32 #include <xcb/present.h>
33
34 #include <X11/Xlib-xcb.h>
35
36 #include "loader_dri3_helper.h"
37 #include "util/macros.h"
38 #include "drm_fourcc.h"
39
40 /* From xmlpool/options.h, user exposed so should be stable */
41 #define DRI_CONF_VBLANK_NEVER 0
42 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
43 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
44 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
45
46 /**
47 * A cached blit context.
48 */
49 struct loader_dri3_blit_context {
50 mtx_t mtx;
51 __DRIcontext *ctx;
52 __DRIscreen *cur_screen;
53 const __DRIcoreExtension *core;
54 };
55
56 /* For simplicity we maintain the cache only for a single screen at a time */
57 static struct loader_dri3_blit_context blit_context = {
58 _MTX_INITIALIZER_NP, NULL
59 };
60
61 static void
62 dri3_flush_present_events(struct loader_dri3_drawable *draw);
63
64 static struct loader_dri3_buffer *
65 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
66
67 static xcb_screen_t *
68 get_screen_for_root(xcb_connection_t *conn, xcb_window_t root)
69 {
70 xcb_screen_iterator_t screen_iter =
71 xcb_setup_roots_iterator(xcb_get_setup(conn));
72
73 for (; screen_iter.rem; xcb_screen_next (&screen_iter)) {
74 if (screen_iter.data->root == root)
75 return screen_iter.data;
76 }
77
78 return NULL;
79 }
80
81 static xcb_visualtype_t *
82 get_xcb_visualtype_for_depth(struct loader_dri3_drawable *draw, int depth)
83 {
84 xcb_visualtype_iterator_t visual_iter;
85 xcb_screen_t *screen = draw->screen;
86 xcb_depth_iterator_t depth_iter;
87
88 if (!screen)
89 return NULL;
90
91 depth_iter = xcb_screen_allowed_depths_iterator(screen);
92 for (; depth_iter.rem; xcb_depth_next(&depth_iter)) {
93 if (depth_iter.data->depth != depth)
94 continue;
95
96 visual_iter = xcb_depth_visuals_iterator(depth_iter.data);
97 if (visual_iter.rem)
98 return visual_iter.data;
99 }
100
101 return NULL;
102 }
103
104 /* Get red channel mask for given drawable at given depth. */
105 static unsigned int
106 dri3_get_red_mask_for_depth(struct loader_dri3_drawable *draw, int depth)
107 {
108 xcb_visualtype_t *visual = get_xcb_visualtype_for_depth(draw, depth);
109
110 if (visual)
111 return visual->red_mask;
112
113 return 0;
114 }
115
116 /**
117  * Do we have blit functionality in the image extension's blitImage method?
118  *
119  * \param draw[in]  The drawable we intend to blit from / to.
120  * \return  true if we have blit functionality, false otherwise.
121 */
122 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
123 {
124 return draw->ext->image->base.version >= 9 &&
125 draw->ext->image->blitImage != NULL;
126 }
127
128 /**
129 * Get and lock (for use with the current thread) a dri context associated
130 * with the drawable's dri screen. The context is intended to be used with
131 * the dri image extension's blitImage method.
132 *
133 * \param draw[in] Pointer to the drawable whose dri screen we want a
134 * dri context for.
135 * \return A dri context or NULL if context creation failed.
136 *
137 * When the caller is done with the context (even if the context returned was
138 * NULL), the caller must call loader_dri3_blit_context_put.
139 */
140 static __DRIcontext *
141 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
142 {
143 mtx_lock(&blit_context.mtx);
144
145 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
146 blit_context.core->destroyContext(blit_context.ctx);
147 blit_context.ctx = NULL;
148 }
149
150 if (!blit_context.ctx) {
151 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
152 NULL, NULL, NULL);
153 blit_context.cur_screen = draw->dri_screen;
154 blit_context.core = draw->ext->core;
155 }
156
157 return blit_context.ctx;
158 }
159
160 /**
161 * Release (for use with other threads) a dri context previously obtained using
162 * loader_dri3_blit_context_get.
163 */
164 static void
165 loader_dri3_blit_context_put(void)
166 {
167 mtx_unlock(&blit_context.mtx);
168 }
169
170 /**
171  * Blit (parts of) the contents of a DRI image to another DRI image.
172 *
173 * \param draw[in] The drawable which owns the images.
174 * \param dst[in] The destination image.
175 * \param src[in] The source image.
176 * \param dstx0[in] Start destination coordinate.
177 * \param dsty0[in] Start destination coordinate.
178 * \param width[in] Blit width.
179 * \param height[in] Blit height.
180 * \param srcx0[in] Start source coordinate.
181 * \param srcy0[in] Start source coordinate.
182 * \param flush_flag[in] Image blit flush flag.
183 * \return true iff successful.
184 */
185 static bool
186 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
187 __DRIimage *dst, __DRIimage *src,
188 int dstx0, int dsty0, int width, int height,
189 int srcx0, int srcy0, int flush_flag)
190 {
191 __DRIcontext *dri_context;
192 bool use_blit_context = false;
193
194 if (!loader_dri3_have_image_blit(draw))
195 return false;
196
197 dri_context = draw->vtable->get_dri_context(draw);
198
199 if (!dri_context || !draw->vtable->in_current_context(draw)) {
200 dri_context = loader_dri3_blit_context_get(draw);
201 use_blit_context = true;
202 flush_flag |= __BLIT_FLAG_FLUSH;
203 }
204
205 if (dri_context)
206 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
207 width, height, srcx0, srcy0,
208 width, height, flush_flag);
209
210 if (use_blit_context)
211 loader_dri3_blit_context_put();
212
213 return dri_context != NULL;
214 }
215
216 static inline void
217 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
218 {
219 xshmfence_reset(buffer->shm_fence);
220 }
221
222 static inline void
223 dri3_fence_set(struct loader_dri3_buffer *buffer)
224 {
225 xshmfence_trigger(buffer->shm_fence);
226 }
227
228 static inline void
229 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
230 {
231 xcb_sync_trigger_fence(c, buffer->sync_fence);
232 }
233
234 static inline void
235 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
236 struct loader_dri3_buffer *buffer)
237 {
238 xcb_flush(c);
239 xshmfence_await(buffer->shm_fence);
240 if (draw) {
241 mtx_lock(&draw->mtx);
242 dri3_flush_present_events(draw);
243 mtx_unlock(&draw->mtx);
244 }
245 }
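/*
 * Illustrative note (not part of the original source): the copy paths below
 * use the four helpers above in a reset / trigger / await sequence so the CPU
 * knows when a server-side XCopyArea has completed, roughly:
 *
 *    dri3_fence_reset(draw->conn, buf);              // arm the xshmfence
 *    dri3_copy_area(draw->conn, src, dst, gc, ...);  // queue the server blit
 *    dri3_fence_trigger(draw->conn, buf);            // queued after the blit
 *    dri3_fence_await(draw->conn, draw, buf);        // block until triggered
 *
 * For presents, the fence handed to xcb_present_pixmap() further down is
 * instead triggered by the server once the pixmap is idle again.
 */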
246
247 static void
248 dri3_update_num_back(struct loader_dri3_drawable *draw)
249 {
250 if (draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP)
251 draw->num_back = 3;
252 else
253 draw->num_back = 2;
254 }
255
256 void
257 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
258 {
259 draw->swap_interval = interval;
260 }
261
262 /** dri3_free_render_buffer
263 *
264  * Free everything associated with one render buffer, including its pixmap,
265  * its fences and the driver images.
266 */
267 static void
268 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
269 struct loader_dri3_buffer *buffer)
270 {
271 if (buffer->own_pixmap)
272 xcb_free_pixmap(draw->conn, buffer->pixmap);
273 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
274 xshmfence_unmap_shm(buffer->shm_fence);
275 draw->ext->image->destroyImage(buffer->image);
276 if (buffer->linear_buffer)
277 draw->ext->image->destroyImage(buffer->linear_buffer);
278 free(buffer);
279 }
280
281 void
282 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
283 {
284 int i;
285
286 draw->ext->core->destroyDrawable(draw->dri_drawable);
287
288 for (i = 0; i < ARRAY_SIZE(draw->buffers); i++) {
289 if (draw->buffers[i])
290 dri3_free_render_buffer(draw, draw->buffers[i]);
291 }
292
293 if (draw->special_event) {
294 xcb_void_cookie_t cookie =
295 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
296 XCB_PRESENT_EVENT_MASK_NO_EVENT);
297
298 xcb_discard_reply(draw->conn, cookie.sequence);
299 xcb_unregister_for_special_event(draw->conn, draw->special_event);
300 }
301
302 cnd_destroy(&draw->event_cnd);
303 mtx_destroy(&draw->mtx);
304 }
305
306 int
307 loader_dri3_drawable_init(xcb_connection_t *conn,
308 xcb_drawable_t drawable,
309 __DRIscreen *dri_screen,
310 bool is_different_gpu,
311 bool multiplanes_available,
312 const __DRIconfig *dri_config,
313 struct loader_dri3_extensions *ext,
314 const struct loader_dri3_vtable *vtable,
315 struct loader_dri3_drawable *draw)
316 {
317 xcb_get_geometry_cookie_t cookie;
318 xcb_get_geometry_reply_t *reply;
319 xcb_generic_error_t *error;
320 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
321 int swap_interval;
322
323 draw->conn = conn;
324 draw->ext = ext;
325 draw->vtable = vtable;
326 draw->drawable = drawable;
327 draw->dri_screen = dri_screen;
328 draw->is_different_gpu = is_different_gpu;
329 draw->multiplanes_available = multiplanes_available;
330
331 draw->have_back = 0;
332 draw->have_fake_front = 0;
333 draw->first_init = true;
334
335 draw->cur_blit_source = -1;
336 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
337 mtx_init(&draw->mtx, mtx_plain);
338 cnd_init(&draw->event_cnd);
339
340 if (draw->ext->config)
341 draw->ext->config->configQueryi(draw->dri_screen,
342 "vblank_mode", &vblank_mode);
343
344 switch (vblank_mode) {
345 case DRI_CONF_VBLANK_NEVER:
346 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
347 swap_interval = 0;
348 break;
349 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
350 case DRI_CONF_VBLANK_ALWAYS_SYNC:
351 default:
352 swap_interval = 1;
353 break;
354 }
355 draw->swap_interval = swap_interval;
356
357 dri3_update_num_back(draw);
358
359 /* Create a new drawable */
360 draw->dri_drawable =
361 draw->ext->image_driver->createNewDrawable(dri_screen,
362 dri_config,
363 draw);
364
365 if (!draw->dri_drawable)
366 return 1;
367
368 cookie = xcb_get_geometry(draw->conn, draw->drawable);
369 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
370 if (reply == NULL || error != NULL) {
371 draw->ext->core->destroyDrawable(draw->dri_drawable);
372 return 1;
373 }
374
375 draw->screen = get_screen_for_root(draw->conn, reply->root);
376 draw->width = reply->width;
377 draw->height = reply->height;
378 draw->depth = reply->depth;
379 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
380 free(reply);
381
382 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
383 if (draw->ext->core->base.version >= 2) {
384       (void)draw->ext->core->getConfigAttrib(dri_config,
385 __DRI_ATTRIB_SWAP_METHOD,
386 &draw->swap_method);
387 }
388
389 /*
390 * Make sure server has the same swap interval we do for the new
391 * drawable.
392 */
393 loader_dri3_set_swap_interval(draw, swap_interval);
394
395 return 0;
396 }
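/*
 * Usage sketch (illustrative only; "psc", "priv" and "loader_vtable" are
 * hypothetical caller-side names): a window-system binding such as the GLX or
 * EGL X11 platform typically embeds a loader_dri3_drawable in its own
 * drawable struct and initializes it like this:
 *
 *    if (loader_dri3_drawable_init(conn, xdrawable, psc->dri_screen,
 *                                  psc->is_different_gpu,
 *                                  psc->multiplanes_available,
 *                                  dri_config, &psc->loader_ext,
 *                                  &loader_vtable,
 *                                  &priv->loader_drawable)) {
 *       // non-zero return: the DRI drawable could not be created
 *       goto handle_error;
 *    }
 */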
397
398 /*
399 * Process one Present event
400 */
401 static void
402 dri3_handle_present_event(struct loader_dri3_drawable *draw,
403 xcb_present_generic_event_t *ge)
404 {
405 switch (ge->evtype) {
406 case XCB_PRESENT_CONFIGURE_NOTIFY: {
407 xcb_present_configure_notify_event_t *ce = (void *) ge;
408
409 draw->width = ce->width;
410 draw->height = ce->height;
411 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
412 draw->ext->flush->invalidate(draw->dri_drawable);
413 break;
414 }
415 case XCB_PRESENT_COMPLETE_NOTIFY: {
416 xcb_present_complete_notify_event_t *ce = (void *) ge;
417
418 /* Compute the processed SBC number from the received 32-bit serial number
419 * merged with the upper 32-bits of the sent 64-bit serial number while
420 * checking for wrap.
421 */
422 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
423 uint64_t recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
424
425 /* Only assume wraparound if that results in exactly the previous
426 * SBC + 1, otherwise ignore received SBC > sent SBC (those are
427 * probably from a previous loader_dri3_drawable instance) to avoid
428 * calculating bogus target MSC values in loader_dri3_swap_buffers_msc
429 */
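            /* Worked example (added for illustration): if send_sbc is
             * 0x100000002 and this completion is for the swap sent just
             * before the 32-bit serial wrapped (serial == 0xffffffff,
             * recv_sbc currently 0xfffffffe), then recv_sbc computes to
             * 0x1ffffffff. That is greater than send_sbc, but it equals
             * recv_sbc + 0x100000001, so it is folded back down to the
             * correct value 0xffffffff.
             */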
430 if (recv_sbc <= draw->send_sbc)
431 draw->recv_sbc = recv_sbc;
432 else if (recv_sbc == (draw->recv_sbc + 0x100000001ULL))
433 draw->recv_sbc = recv_sbc - 0x100000000ULL;
434
435 /* When moving from flip to copy, we assume that we can allocate in
436 * a more optimal way if we don't need to cater for the display
437 * controller.
438 */
439 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_COPY &&
440 draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP) {
441 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
442 if (draw->buffers[b])
443 draw->buffers[b]->reallocate = true;
444 }
445 }
446
447 /* If the server tells us that our allocation is suboptimal, we
448 * reallocate once.
449 */
450 #ifdef HAVE_DRI3_MODIFIERS
451 if (ce->mode == XCB_PRESENT_COMPLETE_MODE_SUBOPTIMAL_COPY &&
452 draw->last_present_mode != ce->mode) {
453 for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
454 if (draw->buffers[b])
455 draw->buffers[b]->reallocate = true;
456 }
457 }
458 #endif
459 draw->last_present_mode = ce->mode;
460
461 if (draw->vtable->show_fps)
462 draw->vtable->show_fps(draw, ce->ust);
463
464 draw->ust = ce->ust;
465 draw->msc = ce->msc;
466 } else if (ce->serial == draw->eid) {
467 draw->notify_ust = ce->ust;
468 draw->notify_msc = ce->msc;
469 }
470 break;
471 }
472 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
473 xcb_present_idle_notify_event_t *ie = (void *) ge;
474 int b;
475
476 for (b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
477 struct loader_dri3_buffer *buf = draw->buffers[b];
478
479 if (buf && buf->pixmap == ie->pixmap)
480 buf->busy = 0;
481 }
482 break;
483 }
484 }
485 free(ge);
486 }
487
488 static bool
489 dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
490 {
491 xcb_generic_event_t *ev;
492 xcb_present_generic_event_t *ge;
493
494 xcb_flush(draw->conn);
495
496 /* Only have one thread waiting for events at a time */
497 if (draw->has_event_waiter) {
498 cnd_wait(&draw->event_cnd, &draw->mtx);
499 /* Another thread has updated the protected info, so retest. */
500 return true;
501 } else {
502 draw->has_event_waiter = true;
503 /* Allow other threads access to the drawable while we're waiting. */
504 mtx_unlock(&draw->mtx);
505 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
506 mtx_lock(&draw->mtx);
507 draw->has_event_waiter = false;
508 cnd_broadcast(&draw->event_cnd);
509 }
510 if (!ev)
511 return false;
512 ge = (void *) ev;
513 dri3_handle_present_event(draw, ge);
514 return true;
515 }
516
517 /** loader_dri3_wait_for_msc
518 *
519 * Get the X server to send an event when the target msc/divisor/remainder is
520 * reached.
521 */
522 bool
523 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
524 int64_t target_msc,
525 int64_t divisor, int64_t remainder,
526 int64_t *ust, int64_t *msc, int64_t *sbc)
527 {
528 xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
529 draw->drawable,
530 draw->eid,
531 target_msc,
532 divisor,
533 remainder);
534 xcb_generic_event_t *ev;
535 unsigned full_sequence;
536
537 mtx_lock(&draw->mtx);
538 xcb_flush(draw->conn);
539
540 /* Wait for the event */
541 do {
542 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
543 if (!ev) {
544 mtx_unlock(&draw->mtx);
545 return false;
546 }
547
548 full_sequence = ev->full_sequence;
549 dri3_handle_present_event(draw, (void *) ev);
550 } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
551
552 *ust = draw->notify_ust;
553 *msc = draw->notify_msc;
554 *sbc = draw->recv_sbc;
555 mtx_unlock(&draw->mtx);
556
557 return true;
558 }
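/*
 * Usage sketch (illustrative; "priv" is a hypothetical caller struct): a
 * glXWaitForMscOML()-style entry point can sit directly on top of this
 * helper:
 *
 *    int64_t ust, msc, sbc;
 *
 *    if (!loader_dri3_wait_for_msc(&priv->loader_drawable, target_msc,
 *                                  divisor, remainder, &ust, &msc, &sbc))
 *       return False;   // connection error or special event gone
 *    // ust/msc/sbc now hold the values from the matching
 *    // PresentCompleteNotify event
 */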
559
560 /** loader_dri3_wait_for_sbc
561 *
562 * Wait for the completed swap buffer count to reach the specified
563 * target. Presumably the application knows that this will be reached with
564 * outstanding complete events, or we're going to be here awhile.
565 */
566 int
567 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
568 int64_t target_sbc, int64_t *ust,
569 int64_t *msc, int64_t *sbc)
570 {
571 /* From the GLX_OML_sync_control spec:
572 *
573 * "If <target_sbc> = 0, the function will block until all previous
574 * swaps requested with glXSwapBuffersMscOML for that window have
575 * completed."
576 */
577 mtx_lock(&draw->mtx);
578 if (!target_sbc)
579 target_sbc = draw->send_sbc;
580
581 while (draw->recv_sbc < target_sbc) {
582 if (!dri3_wait_for_event_locked(draw)) {
583 mtx_unlock(&draw->mtx);
584 return 0;
585 }
586 }
587
588 *ust = draw->ust;
589 *msc = draw->msc;
590 *sbc = draw->recv_sbc;
591 mtx_unlock(&draw->mtx);
592 return 1;
593 }
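/*
 * Illustrative note: passing target_sbc == 0 waits for every swap submitted
 * so far, which is exactly how loader_dri3_swapbuffer_barrier() below drains
 * the swap queue:
 *
 *    int64_t ust, msc, sbc;
 *
 *    (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
 */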
594
595 /** dri3_find_back
596  *
597  * Find an idle back buffer. If there isn't one, then
598  * wait for a present idle notify event from the X server.
599 */
600 static int
601 dri3_find_back(struct loader_dri3_drawable *draw)
602 {
603 int b;
604 int num_to_consider;
605
606 mtx_lock(&draw->mtx);
607    /* Increase the likelihood of reusing the current buffer */
608 dri3_flush_present_events(draw);
609
610 /* Check whether we need to reuse the current back buffer as new back.
611 * In that case, wait until it's not busy anymore.
612 */
613 num_to_consider = draw->num_back;
614 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
615 num_to_consider = 1;
616 draw->cur_blit_source = -1;
617 }
618
619 for (;;) {
620 for (b = 0; b < num_to_consider; b++) {
621 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
622 struct loader_dri3_buffer *buffer = draw->buffers[id];
623
624 if (!buffer || !buffer->busy) {
625 draw->cur_back = id;
626 mtx_unlock(&draw->mtx);
627 return id;
628 }
629 }
630 if (!dri3_wait_for_event_locked(draw)) {
631 mtx_unlock(&draw->mtx);
632 return -1;
633 }
634 }
635 }
636
637 static xcb_gcontext_t
638 dri3_drawable_gc(struct loader_dri3_drawable *draw)
639 {
640 if (!draw->gc) {
641 uint32_t v = 0;
642 xcb_create_gc(draw->conn,
643 (draw->gc = xcb_generate_id(draw->conn)),
644 draw->drawable,
645 XCB_GC_GRAPHICS_EXPOSURES,
646 &v);
647 }
648 return draw->gc;
649 }
650
651
652 static struct loader_dri3_buffer *
653 dri3_back_buffer(struct loader_dri3_drawable *draw)
654 {
655 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
656 }
657
658 static struct loader_dri3_buffer *
659 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
660 {
661 return draw->buffers[LOADER_DRI3_FRONT_ID];
662 }
663
664 static void
665 dri3_copy_area(xcb_connection_t *c,
666 xcb_drawable_t src_drawable,
667 xcb_drawable_t dst_drawable,
668 xcb_gcontext_t gc,
669 int16_t src_x,
670 int16_t src_y,
671 int16_t dst_x,
672 int16_t dst_y,
673 uint16_t width,
674 uint16_t height)
675 {
676 xcb_void_cookie_t cookie;
677
678 cookie = xcb_copy_area_checked(c,
679 src_drawable,
680 dst_drawable,
681 gc,
682 src_x,
683 src_y,
684 dst_x,
685 dst_y,
686 width,
687 height);
688 xcb_discard_reply(c, cookie.sequence);
689 }
690
691 /**
692 * Asks the driver to flush any queued work necessary for serializing with the
693 * X command stream, and optionally the slightly more strict requirement of
694 * glFlush() equivalence (which would require flushing even if nothing had
695 * been drawn to a window system framebuffer, for example).
696 */
697 void
698 loader_dri3_flush(struct loader_dri3_drawable *draw,
699 unsigned flags,
700 enum __DRI2throttleReason throttle_reason)
701 {
702    /* The context may be NULL when no context is current; only flush if we have one. */
703 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
704
705 if (dri_context) {
706 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
707 flags, throttle_reason);
708 }
709 }
710
711 void
712 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
713 int x, int y,
714 int width, int height,
715 bool flush)
716 {
717 struct loader_dri3_buffer *back;
718 unsigned flags = __DRI2_FLUSH_DRAWABLE;
719
720 /* Check we have the right attachments */
721 if (!draw->have_back || draw->is_pixmap)
722 return;
723
724 if (flush)
725 flags |= __DRI2_FLUSH_CONTEXT;
726 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
727
728 back = dri3_find_back_alloc(draw);
729 if (!back)
730 return;
731
732 y = draw->height - y - height;
733
734 if (draw->is_different_gpu) {
735 /* Update the linear buffer part of the back buffer
736 * for the dri3_copy_area operation
737 */
738 (void) loader_dri3_blit_image(draw,
739 back->linear_buffer,
740 back->image,
741 0, 0, back->width, back->height,
742 0, 0, __BLIT_FLAG_FLUSH);
743 }
744
745 loader_dri3_swapbuffer_barrier(draw);
746 dri3_fence_reset(draw->conn, back);
747 dri3_copy_area(draw->conn,
748 back->pixmap,
749 draw->drawable,
750 dri3_drawable_gc(draw),
751 x, y, x, y, width, height);
752 dri3_fence_trigger(draw->conn, back);
753 /* Refresh the fake front (if present) after we just damaged the real
754 * front.
755 */
756 if (draw->have_fake_front &&
757 !loader_dri3_blit_image(draw,
758 dri3_fake_front_buffer(draw)->image,
759 back->image,
760 x, y, width, height,
761 x, y, __BLIT_FLAG_FLUSH) &&
762 !draw->is_different_gpu) {
763 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
764 dri3_copy_area(draw->conn,
765 back->pixmap,
766 dri3_fake_front_buffer(draw)->pixmap,
767 dri3_drawable_gc(draw),
768 x, y, x, y, width, height);
769 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
770 dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
771 }
772 dri3_fence_await(draw->conn, draw, back);
773 }
774
775 void
776 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
777 xcb_drawable_t dest,
778 xcb_drawable_t src)
779 {
780 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
781
782 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
783 dri3_copy_area(draw->conn,
784 src, dest,
785 dri3_drawable_gc(draw),
786 0, 0, 0, 0, draw->width, draw->height);
787 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
788 dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
789 }
790
791 void
792 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
793 {
794 struct loader_dri3_buffer *front;
795
796 if (draw == NULL || !draw->have_fake_front)
797 return;
798
799 front = dri3_fake_front_buffer(draw);
800
801 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
802
803 /* In the psc->is_different_gpu case, the linear buffer has been updated,
804 * but not yet the tiled buffer.
805 * Copy back to the tiled buffer we use for rendering.
806 * Note that we don't need flushing.
807 */
808 if (draw->is_different_gpu)
809 (void) loader_dri3_blit_image(draw,
810 front->image,
811 front->linear_buffer,
812 0, 0, front->width, front->height,
813 0, 0, 0);
814 }
815
816 void
817 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
818 {
819 struct loader_dri3_buffer *front;
820
821 if (draw == NULL || !draw->have_fake_front)
822 return;
823
824 front = dri3_fake_front_buffer(draw);
825
826 /* In the psc->is_different_gpu case, we update the linear_buffer
827 * before updating the real front.
828 */
829 if (draw->is_different_gpu)
830 (void) loader_dri3_blit_image(draw,
831 front->linear_buffer,
832 front->image,
833 0, 0, front->width, front->height,
834 0, 0, __BLIT_FLAG_FLUSH);
835 loader_dri3_swapbuffer_barrier(draw);
836 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
837 }
838
839 /** dri3_flush_present_events
840 *
841 * Process any present events that have been received from the X server
842 */
843 static void
844 dri3_flush_present_events(struct loader_dri3_drawable *draw)
845 {
846 /* Check to see if any configuration changes have occurred
847 * since we were last invoked
848 */
849 if (draw->has_event_waiter)
850 return;
851
852 if (draw->special_event) {
853 xcb_generic_event_t *ev;
854
855 while ((ev = xcb_poll_for_special_event(draw->conn,
856 draw->special_event)) != NULL) {
857 xcb_present_generic_event_t *ge = (void *) ev;
858 dri3_handle_present_event(draw, ge);
859 }
860 }
861 }
862
863 /** loader_dri3_swap_buffers_msc
864 *
865 * Make the current back buffer visible using the present extension
866 */
867 int64_t
868 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
869 int64_t target_msc, int64_t divisor,
870 int64_t remainder, unsigned flush_flags,
871 bool force_copy)
872 {
873 struct loader_dri3_buffer *back;
874 int64_t ret = 0;
875 uint32_t options = XCB_PRESENT_OPTION_NONE;
876
877 draw->vtable->flush_drawable(draw, flush_flags);
878
879 back = dri3_find_back_alloc(draw);
880
881 mtx_lock(&draw->mtx);
882 if (draw->is_different_gpu && back) {
883 /* Update the linear buffer before presenting the pixmap */
884 (void) loader_dri3_blit_image(draw,
885 back->linear_buffer,
886 back->image,
887 0, 0, back->width, back->height,
888 0, 0, __BLIT_FLAG_FLUSH);
889 }
890
891 /* If we need to preload the new back buffer, remember the source.
892 * The force_copy parameter is used by EGL to attempt to preserve
893 * the back buffer across a call to this function.
894 */
895 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
896 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
897
898 /* Exchange the back and fake front. Even though the server knows about these
899 * buffers, it has no notion of back and fake front.
900 */
901 if (back && draw->have_fake_front) {
902 struct loader_dri3_buffer *tmp;
903
904 tmp = dri3_fake_front_buffer(draw);
905 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
906 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
907
908 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
909 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
910 }
911
912 dri3_flush_present_events(draw);
913
914 if (back && !draw->is_pixmap) {
915 dri3_fence_reset(draw->conn, back);
916
917 /* Compute when we want the frame shown by taking the last known
918 * successful MSC and adding in a swap interval for each outstanding swap
919 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
920 * semantic"
921 */
922 ++draw->send_sbc;
923 if (target_msc == 0 && divisor == 0 && remainder == 0)
924 target_msc = draw->msc + draw->swap_interval *
925 (draw->send_sbc - draw->recv_sbc);
926 else if (divisor == 0 && remainder > 0) {
927 /* From the GLX_OML_sync_control spec:
928 * "If <divisor> = 0, the swap will occur when MSC becomes
929 * greater than or equal to <target_msc>."
930 *
931 * Note that there's no mention of the remainder. The Present
932 * extension throws BadValue for remainder != 0 with divisor == 0, so
933 * just drop the passed in value.
934 */
935 remainder = 0;
936 }
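      /* Worked example (added for illustration): with swap_interval == 1, a
       * last known msc of 100 and two swaps in flight after the increment
       * above (send_sbc == 12, recv_sbc == 10), the default path computes
       * target_msc = 100 + 1 * (12 - 10) = 102, i.e. this frame is queued
       * one vblank after the previously queued one.
       */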
937
938 /* From the GLX_EXT_swap_control spec
939 * and the EGL 1.4 spec (page 53):
940 *
941 * "If <interval> is set to a value of 0, buffer swaps are not
942 * synchronized to a video frame."
943 *
944 * Implementation note: It is possible to enable triple buffering
945 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
946 * the default.
947 */
948 if (draw->swap_interval == 0)
949 options |= XCB_PRESENT_OPTION_ASYNC;
950
951 /* If we need to populate the new back, but need to reuse the back
952 * buffer slot due to lack of local blit capabilities, make sure
953        * the server doesn't flip, which would deadlock us.
954 */
955 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
956 options |= XCB_PRESENT_OPTION_COPY;
957 #ifdef HAVE_DRI3_MODIFIERS
958 if (draw->multiplanes_available)
959 options |= XCB_PRESENT_OPTION_SUBOPTIMAL;
960 #endif
961 back->busy = 1;
962 back->last_swap = draw->send_sbc;
963 xcb_present_pixmap(draw->conn,
964 draw->drawable,
965 back->pixmap,
966 (uint32_t) draw->send_sbc,
967 0, /* valid */
968 0, /* update */
969 0, /* x_off */
970 0, /* y_off */
971 None, /* target_crtc */
972 None,
973 back->sync_fence,
974 options,
975 target_msc,
976 divisor,
977 remainder, 0, NULL);
978 ret = (int64_t) draw->send_sbc;
979
980 /* Schedule a server-side back-preserving blit if necessary.
981 * This happens iff all conditions below are satisfied:
982 * a) We have a fake front,
983 * b) We need to preserve the back buffer,
984 * c) We don't have local blit capabilities.
985 */
986 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
987 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
988 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
989 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
990
991 dri3_fence_reset(draw->conn, new_back);
992 dri3_copy_area(draw->conn, src->pixmap,
993 new_back->pixmap,
994 dri3_drawable_gc(draw),
995 0, 0, 0, 0, draw->width, draw->height);
996 dri3_fence_trigger(draw->conn, new_back);
997 new_back->last_swap = src->last_swap;
998 }
999
1000 xcb_flush(draw->conn);
1001 if (draw->stamp)
1002 ++(*draw->stamp);
1003 }
1004 mtx_unlock(&draw->mtx);
1005
1006 draw->ext->flush->invalidate(draw->dri_drawable);
1007
1008 return ret;
1009 }
1010
1011 int
1012 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
1013 {
1014 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
1015 int ret;
1016
1017 mtx_lock(&draw->mtx);
1018 ret = (!back || back->last_swap == 0) ? 0 :
1019 draw->send_sbc - back->last_swap + 1;
1020 mtx_unlock(&draw->mtx);
1021
1022 return ret;
1023 }
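/*
 * Illustrative note: the age computed above matches the buffer-age semantics
 * of GLX_EXT_buffer_age / EGL_EXT_buffer_age. For example, a back buffer last
 * presented at SBC 7 queried while draw->send_sbc is 9 reports an age of
 * 9 - 7 + 1 = 3 (its contents are three frames old), while a never-presented
 * buffer (last_swap == 0) reports age 0.
 */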
1024
1025 /** loader_dri3_open
1026 *
1027 * Wrapper around xcb_dri3_open
1028 */
1029 int
1030 loader_dri3_open(xcb_connection_t *conn,
1031 xcb_window_t root,
1032 uint32_t provider)
1033 {
1034 xcb_dri3_open_cookie_t cookie;
1035 xcb_dri3_open_reply_t *reply;
1036 int fd;
1037
1038 cookie = xcb_dri3_open(conn,
1039 root,
1040 provider);
1041
1042 reply = xcb_dri3_open_reply(conn, cookie, NULL);
1043 if (!reply)
1044 return -1;
1045
1046 if (reply->nfd != 1) {
1047 free(reply);
1048 return -1;
1049 }
1050
1051 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
1052 free(reply);
1053 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
1054
1055 return fd;
1056 }
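/*
 * Usage sketch (illustrative): during screen setup a binding typically opens
 * the DRM device for the screen's root window and hands the fd to the common
 * loader code, roughly:
 *
 *    int fd = loader_dri3_open(conn, screen->root, None);
 *
 *    if (fd < 0)
 *       return NULL;   // DRI3 open failed; fall back to another path
 *    // FD_CLOEXEC has already been set on fd by loader_dri3_open()
 */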
1057
1058 static uint32_t
1059 dri3_cpp_for_format(uint32_t format) {
1060 switch (format) {
1061 case __DRI_IMAGE_FORMAT_R8:
1062 return 1;
1063 case __DRI_IMAGE_FORMAT_RGB565:
1064 case __DRI_IMAGE_FORMAT_GR88:
1065 return 2;
1066 case __DRI_IMAGE_FORMAT_XRGB8888:
1067 case __DRI_IMAGE_FORMAT_ARGB8888:
1068 case __DRI_IMAGE_FORMAT_ABGR8888:
1069 case __DRI_IMAGE_FORMAT_XBGR8888:
1070 case __DRI_IMAGE_FORMAT_XRGB2101010:
1071 case __DRI_IMAGE_FORMAT_ARGB2101010:
1072 case __DRI_IMAGE_FORMAT_XBGR2101010:
1073 case __DRI_IMAGE_FORMAT_ABGR2101010:
1074 case __DRI_IMAGE_FORMAT_SARGB8:
1075 case __DRI_IMAGE_FORMAT_SABGR8:
1076 return 4;
1077 case __DRI_IMAGE_FORMAT_NONE:
1078 default:
1079 return 0;
1080 }
1081 }
1082
1083 /* Map format of render buffer to corresponding format for the linear_buffer
1084 * used for sharing with the display gpu of a Prime setup (== is_different_gpu).
1085 * Usually linear_format == format, except for depth >= 30 formats, where
1086 * different gpu vendors have different preferences wrt. color channel ordering.
1087 */
1088 static uint32_t
1089 dri3_linear_format_for_format(struct loader_dri3_drawable *draw, uint32_t format)
1090 {
1091 switch (format) {
1092 case __DRI_IMAGE_FORMAT_XRGB2101010:
1093 case __DRI_IMAGE_FORMAT_XBGR2101010:
1094 /* Different preferred formats for different hw */
1095 if (dri3_get_red_mask_for_depth(draw, 30) == 0x3ff)
1096 return __DRI_IMAGE_FORMAT_XBGR2101010;
1097 else
1098 return __DRI_IMAGE_FORMAT_XRGB2101010;
1099
1100 case __DRI_IMAGE_FORMAT_ARGB2101010:
1101 case __DRI_IMAGE_FORMAT_ABGR2101010:
1102 /* Different preferred formats for different hw */
1103 if (dri3_get_red_mask_for_depth(draw, 30) == 0x3ff)
1104 return __DRI_IMAGE_FORMAT_ABGR2101010;
1105 else
1106 return __DRI_IMAGE_FORMAT_ARGB2101010;
1107
1108 default:
1109 return format;
1110 }
1111 }
1112
1113 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1114 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1115 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1116 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1117 */
1118 static int
1119 image_format_to_fourcc(int format)
1120 {
1121
1122 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1123 switch (format) {
1124 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1125 case __DRI_IMAGE_FORMAT_SABGR8: return __DRI_IMAGE_FOURCC_SABGR8888;
1126 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1127 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1128 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1129 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1130 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1131 case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
1132 case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
1133 case __DRI_IMAGE_FORMAT_XBGR2101010: return __DRI_IMAGE_FOURCC_XBGR2101010;
1134 case __DRI_IMAGE_FORMAT_ABGR2101010: return __DRI_IMAGE_FOURCC_ABGR2101010;
1135 }
1136 return 0;
1137 }
1138
1139 #ifdef HAVE_DRI3_MODIFIERS
1140 static bool
1141 has_supported_modifier(struct loader_dri3_drawable *draw, unsigned int format,
1142 uint64_t *modifiers, uint32_t count)
1143 {
1144 uint64_t *supported_modifiers;
1145 int32_t supported_modifiers_count;
1146 bool found = false;
1147 int i, j;
1148
1149 if (!draw->ext->image->queryDmaBufModifiers(draw->dri_screen,
1150 format, 0, NULL, NULL,
1151 &supported_modifiers_count) ||
1152 supported_modifiers_count == 0)
1153 return false;
1154
1155 supported_modifiers = malloc(supported_modifiers_count * sizeof(uint64_t));
1156 if (!supported_modifiers)
1157 return false;
1158
1159 draw->ext->image->queryDmaBufModifiers(draw->dri_screen, format,
1160 supported_modifiers_count,
1161 supported_modifiers, NULL,
1162 &supported_modifiers_count);
1163
1164 for (i = 0; !found && i < supported_modifiers_count; i++) {
1165 for (j = 0; !found && j < count; j++) {
1166 if (supported_modifiers[i] == modifiers[j])
1167 found = true;
1168 }
1169 }
1170
1171 free(supported_modifiers);
1172 return found;
1173 }
1174 #endif
1175
1176 /** dri3_alloc_render_buffer
1177  *
1178  * Use the driver createImage function to construct a __DRIimage, then
1179  * get a file descriptor for it and create an X pixmap from that.
1180  *
1181  * Also allocate an xshmfence for synchronization.
1182 */
1183 static struct loader_dri3_buffer *
1184 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
1185 int width, int height, int depth)
1186 {
1187 struct loader_dri3_buffer *buffer;
1188 __DRIimage *pixmap_buffer;
1189 xcb_pixmap_t pixmap;
1190 xcb_sync_fence_t sync_fence;
1191 struct xshmfence *shm_fence;
1192 int buffer_fds[4], fence_fd;
1193 int num_planes = 0;
1194 int i, mod;
1195 int ret;
1196
1197 /* Create an xshmfence object and
1198 * prepare to send that to the X server
1199 */
1200
1201 fence_fd = xshmfence_alloc_shm();
1202 if (fence_fd < 0)
1203 return NULL;
1204
1205 shm_fence = xshmfence_map_shm(fence_fd);
1206 if (shm_fence == NULL)
1207 goto no_shm_fence;
1208
1209 /* Allocate the image from the driver
1210 */
1211 buffer = calloc(1, sizeof *buffer);
1212 if (!buffer)
1213 goto no_buffer;
1214
1215 buffer->cpp = dri3_cpp_for_format(format);
1216 if (!buffer->cpp)
1217 goto no_image;
1218
1219 if (!draw->is_different_gpu) {
1220 #ifdef HAVE_DRI3_MODIFIERS
1221 if (draw->multiplanes_available &&
1222 draw->ext->image->base.version >= 15 &&
1223 draw->ext->image->queryDmaBufModifiers &&
1224 draw->ext->image->createImageWithModifiers) {
1225 xcb_dri3_get_supported_modifiers_cookie_t mod_cookie;
1226 xcb_dri3_get_supported_modifiers_reply_t *mod_reply;
1227 xcb_generic_error_t *error = NULL;
1228 uint64_t *modifiers = NULL;
1229 uint32_t count = 0;
1230
1231 mod_cookie = xcb_dri3_get_supported_modifiers(draw->conn,
1232 draw->window,
1233 depth, buffer->cpp * 8);
1234 mod_reply = xcb_dri3_get_supported_modifiers_reply(draw->conn,
1235 mod_cookie,
1236 &error);
1237 if (!mod_reply)
1238 goto no_image;
1239
1240 if (mod_reply->num_window_modifiers) {
1241 count = mod_reply->num_window_modifiers;
1242 modifiers = malloc(count * sizeof(uint64_t));
1243 if (!modifiers) {
1244 free(mod_reply);
1245 goto no_image;
1246 }
1247
1248 memcpy(modifiers,
1249 xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply),
1250 count * sizeof(uint64_t));
1251
1252 if (!has_supported_modifier(draw, image_format_to_fourcc(format),
1253 modifiers, count)) {
1254 free(modifiers);
1255 count = 0;
1256 modifiers = NULL;
1257 }
1258 }
1259
1260 if (mod_reply->num_screen_modifiers && modifiers == NULL) {
1261 count = mod_reply->num_screen_modifiers;
1262 modifiers = malloc(count * sizeof(uint64_t));
1263 if (!modifiers) {
1264 free(modifiers);
1265 free(mod_reply);
1266 goto no_image;
1267 }
1268
1269 memcpy(modifiers,
1270 xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply),
1271 count * sizeof(uint64_t));
1272 }
1273
1274 free(mod_reply);
1275
1276 buffer->image = draw->ext->image->createImageWithModifiers(draw->dri_screen,
1277 width, height,
1278 format,
1279 modifiers,
1280 count,
1281 buffer);
1282 free(modifiers);
1283 }
1284 #endif
1285 if (!buffer->image)
1286 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1287 width, height,
1288 format,
1289 __DRI_IMAGE_USE_SHARE |
1290 __DRI_IMAGE_USE_SCANOUT |
1291 __DRI_IMAGE_USE_BACKBUFFER,
1292 buffer);
1293
1294 pixmap_buffer = buffer->image;
1295
1296 if (!buffer->image)
1297 goto no_image;
1298 } else {
1299 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1300 width, height,
1301 format,
1302 0,
1303 buffer);
1304
1305 if (!buffer->image)
1306 goto no_image;
1307
1308 buffer->linear_buffer =
1309 draw->ext->image->createImage(draw->dri_screen,
1310 width, height,
1311 dri3_linear_format_for_format(draw, format),
1312 __DRI_IMAGE_USE_SHARE |
1313 __DRI_IMAGE_USE_LINEAR |
1314 __DRI_IMAGE_USE_BACKBUFFER,
1315 buffer);
1316 pixmap_buffer = buffer->linear_buffer;
1317
1318 if (!buffer->linear_buffer)
1319 goto no_linear_buffer;
1320 }
1321
1322    /* X wants some information about the planes, so ask the image for it
1323 */
1324 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_NUM_PLANES,
1325 &num_planes))
1326 num_planes = 1;
1327
1328 for (i = 0; i < num_planes; i++) {
1329 __DRIimage *image = draw->ext->image->fromPlanar(pixmap_buffer, i, NULL);
1330
1331 if (!image) {
1332 assert(i == 0);
1333 image = pixmap_buffer;
1334 }
1335
1336 ret = draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_FD,
1337 &buffer_fds[i]);
1338 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_STRIDE,
1339 &buffer->strides[i]);
1340 ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_OFFSET,
1341 &buffer->offsets[i]);
1342 if (image != pixmap_buffer)
1343 draw->ext->image->destroyImage(image);
1344
1345 if (!ret)
1346 goto no_buffer_attrib;
1347 }
1348
1349 ret = draw->ext->image->queryImage(pixmap_buffer,
1350 __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod);
1351 buffer->modifier = (uint64_t) mod << 32;
1352 ret &= draw->ext->image->queryImage(pixmap_buffer,
1353 __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod);
1354 buffer->modifier |= (uint64_t)(mod & 0xffffffff);
1355
1356 if (!ret)
1357 buffer->modifier = DRM_FORMAT_MOD_INVALID;
1358
1359 pixmap = xcb_generate_id(draw->conn);
1360 #ifdef HAVE_DRI3_MODIFIERS
1361 if (draw->multiplanes_available &&
1362 buffer->modifier != DRM_FORMAT_MOD_INVALID) {
1363 xcb_dri3_pixmap_from_buffers(draw->conn,
1364 pixmap,
1365 draw->window,
1366 num_planes,
1367 width, height,
1368 buffer->strides[0], buffer->offsets[0],
1369 buffer->strides[1], buffer->offsets[1],
1370 buffer->strides[2], buffer->offsets[2],
1371 buffer->strides[3], buffer->offsets[3],
1372 depth, buffer->cpp * 8,
1373 buffer->modifier,
1374 buffer_fds);
1375 } else
1376 #endif
1377 {
1378 xcb_dri3_pixmap_from_buffer(draw->conn,
1379 pixmap,
1380 draw->drawable,
1381 buffer->size,
1382 width, height, buffer->strides[0],
1383 depth, buffer->cpp * 8,
1384 buffer_fds[0]);
1385 }
1386
1387 xcb_dri3_fence_from_fd(draw->conn,
1388 pixmap,
1389 (sync_fence = xcb_generate_id(draw->conn)),
1390 false,
1391 fence_fd);
1392
1393 buffer->pixmap = pixmap;
1394 buffer->own_pixmap = true;
1395 buffer->sync_fence = sync_fence;
1396 buffer->shm_fence = shm_fence;
1397 buffer->width = width;
1398 buffer->height = height;
1399
1400 /* Mark the buffer as idle
1401 */
1402 dri3_fence_set(buffer);
1403
1404 return buffer;
1405
1406 no_buffer_attrib:
1407 do {
1408 close(buffer_fds[i]);
1409 } while (--i >= 0);
1410 draw->ext->image->destroyImage(pixmap_buffer);
1411 no_linear_buffer:
1412 if (draw->is_different_gpu)
1413 draw->ext->image->destroyImage(buffer->image);
1414 no_image:
1415 free(buffer);
1416 no_buffer:
1417 xshmfence_unmap_shm(shm_fence);
1418 no_shm_fence:
1419 close(fence_fd);
1420 return NULL;
1421 }
1422
1423 /** dri3_update_drawable
1424  *
1425  * Called the first time we use the drawable and then
1426  * after we receive present configure notify events to
1427  * track the geometry of the drawable.
1428 */
1429 static int
1430 dri3_update_drawable(struct loader_dri3_drawable *draw)
1431 {
1432 mtx_lock(&draw->mtx);
1433 if (draw->first_init) {
1434 xcb_get_geometry_cookie_t geom_cookie;
1435 xcb_get_geometry_reply_t *geom_reply;
1436 xcb_void_cookie_t cookie;
1437 xcb_generic_error_t *error;
1438 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1439 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1440 xcb_window_t root_win;
1441
1442 draw->first_init = false;
1443
1444 /* Try to select for input on the window.
1445 *
1446 * If the drawable is a window, this will get our events
1447 * delivered.
1448 *
1449 * Otherwise, we'll get a BadWindow error back from this request which
1450 * will let us know that the drawable is a pixmap instead.
1451 */
1452
1453 draw->eid = xcb_generate_id(draw->conn);
1454 cookie =
1455 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1456 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1457 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1458 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1459
1460 present_capabilities_cookie =
1461 xcb_present_query_capabilities(draw->conn, draw->drawable);
1462
1463 /* Create an XCB event queue to hold present events outside of the usual
1464 * application event queue
1465 */
1466 draw->special_event = xcb_register_for_special_xge(draw->conn,
1467 &xcb_present_id,
1468 draw->eid,
1469 draw->stamp);
1470 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1471
1472 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1473
1474 if (!geom_reply) {
1475 mtx_unlock(&draw->mtx);
1476 return false;
1477 }
1478 draw->width = geom_reply->width;
1479 draw->height = geom_reply->height;
1480 draw->depth = geom_reply->depth;
1481 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1482 root_win = geom_reply->root;
1483
1484 free(geom_reply);
1485
1486 draw->is_pixmap = false;
1487
1488 /* Check to see if our select input call failed. If it failed with a
1489 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1490 * special event queue created above and mark the drawable as a pixmap
1491 */
1492
1493 error = xcb_request_check(draw->conn, cookie);
1494
1495 present_capabilities_reply =
1496 xcb_present_query_capabilities_reply(draw->conn,
1497 present_capabilities_cookie,
1498 NULL);
1499
1500 if (present_capabilities_reply) {
1501 draw->present_capabilities = present_capabilities_reply->capabilities;
1502 free(present_capabilities_reply);
1503 } else
1504 draw->present_capabilities = 0;
1505
1506 if (error) {
1507 if (error->error_code != BadWindow) {
1508 free(error);
1509 mtx_unlock(&draw->mtx);
1510 return false;
1511 }
1512 free(error);
1513 draw->is_pixmap = true;
1514 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1515 draw->special_event = NULL;
1516 }
1517
1518 if (draw->is_pixmap)
1519 draw->window = root_win;
1520 else
1521 draw->window = draw->drawable;
1522 }
1523 dri3_flush_present_events(draw);
1524 mtx_unlock(&draw->mtx);
1525 return true;
1526 }
1527
1528 __DRIimage *
1529 loader_dri3_create_image(xcb_connection_t *c,
1530 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1531 unsigned int format,
1532 __DRIscreen *dri_screen,
1533 const __DRIimageExtension *image,
1534 void *loaderPrivate)
1535 {
1536 int *fds;
1537 __DRIimage *image_planar, *ret;
1538 int stride, offset;
1539
1540 /* Get an FD for the pixmap object
1541 */
1542 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1543
1544 stride = bp_reply->stride;
1545 offset = 0;
1546
1547 /* createImageFromFds creates a wrapper __DRIimage structure which
1548  * can deal with multiple planes for things like YUV images. So, once
1549 * we've gotten the planar wrapper, pull the single plane out of it and
1550 * discard the wrapper.
1551 */
1552 image_planar = image->createImageFromFds(dri_screen,
1553 bp_reply->width,
1554 bp_reply->height,
1555 image_format_to_fourcc(format),
1556 fds, 1,
1557 &stride, &offset, loaderPrivate);
1558 close(fds[0]);
1559 if (!image_planar)
1560 return NULL;
1561
1562 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1563
1564 if (!ret)
1565 ret = image_planar;
1566 else
1567 image->destroyImage(image_planar);
1568
1569 return ret;
1570 }
1571
1572 #ifdef HAVE_DRI3_MODIFIERS
1573 __DRIimage *
1574 loader_dri3_create_image_from_buffers(xcb_connection_t *c,
1575 xcb_dri3_buffers_from_pixmap_reply_t *bp_reply,
1576 unsigned int format,
1577 __DRIscreen *dri_screen,
1578 const __DRIimageExtension *image,
1579 void *loaderPrivate)
1580 {
1581 __DRIimage *ret;
1582 int *fds;
1583 uint32_t *strides_in, *offsets_in;
1584 int strides[4], offsets[4];
1585 unsigned error;
1586 int i;
1587
1588 if (bp_reply->nfd > 4)
1589 return NULL;
1590
1591 fds = xcb_dri3_buffers_from_pixmap_reply_fds(c, bp_reply);
1592 strides_in = xcb_dri3_buffers_from_pixmap_strides(bp_reply);
1593 offsets_in = xcb_dri3_buffers_from_pixmap_offsets(bp_reply);
1594 for (i = 0; i < bp_reply->nfd; i++) {
1595 strides[i] = strides_in[i];
1596 offsets[i] = offsets_in[i];
1597 }
1598
1599 ret = image->createImageFromDmaBufs2(dri_screen,
1600 bp_reply->width,
1601 bp_reply->height,
1602 image_format_to_fourcc(format),
1603 bp_reply->modifier,
1604 fds, bp_reply->nfd,
1605 strides, offsets,
1606 0, 0, 0, 0, /* UNDEFINED */
1607 &error, loaderPrivate);
1608
1609 for (i = 0; i < bp_reply->nfd; i++)
1610 close(fds[i]);
1611
1612 return ret;
1613 }
1614 #endif
1615
1616 /** dri3_get_pixmap_buffer
1617 *
1618 * Get the DRM object for a pixmap from the X server and
1619 * wrap that with a __DRIimage structure using createImageFromFds
1620 */
1621 static struct loader_dri3_buffer *
1622 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1623 enum loader_dri3_buffer_type buffer_type,
1624 struct loader_dri3_drawable *draw)
1625 {
1626 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1627 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1628 xcb_drawable_t pixmap;
1629 xcb_sync_fence_t sync_fence;
1630 struct xshmfence *shm_fence;
1631 int width;
1632 int height;
1633 int fence_fd;
1634 __DRIscreen *cur_screen;
1635
1636 if (buffer)
1637 return buffer;
1638
1639 pixmap = draw->drawable;
1640
1641 buffer = calloc(1, sizeof *buffer);
1642 if (!buffer)
1643 goto no_buffer;
1644
1645 fence_fd = xshmfence_alloc_shm();
1646 if (fence_fd < 0)
1647 goto no_fence;
1648 shm_fence = xshmfence_map_shm(fence_fd);
1649 if (shm_fence == NULL) {
1650 close (fence_fd);
1651 goto no_fence;
1652 }
1653
1654 /* Get the currently-bound screen or revert to using the drawable's screen if
1655 * no contexts are currently bound. The latter case is at least necessary for
1656 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1657 */
1658 cur_screen = draw->vtable->get_dri_screen();
1659 if (!cur_screen) {
1660 cur_screen = draw->dri_screen;
1661 }
1662
1663 xcb_dri3_fence_from_fd(draw->conn,
1664 pixmap,
1665 (sync_fence = xcb_generate_id(draw->conn)),
1666 false,
1667 fence_fd);
1668 #ifdef HAVE_DRI3_MODIFIERS
1669 if (draw->multiplanes_available &&
1670 draw->ext->image->base.version >= 15 &&
1671 draw->ext->image->createImageFromDmaBufs2) {
1672 xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie;
1673 xcb_dri3_buffers_from_pixmap_reply_t *bps_reply;
1674
1675 bps_cookie = xcb_dri3_buffers_from_pixmap(draw->conn, pixmap);
1676 bps_reply = xcb_dri3_buffers_from_pixmap_reply(draw->conn, bps_cookie,
1677 NULL);
1678 if (!bps_reply)
1679 goto no_image;
1680 buffer->image =
1681 loader_dri3_create_image_from_buffers(draw->conn, bps_reply, format,
1682 cur_screen, draw->ext->image,
1683 buffer);
1684 width = bps_reply->width;
1685 height = bps_reply->height;
1686 free(bps_reply);
1687 } else
1688 #endif
1689 {
1690 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1691 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1692
1693 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1694 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1695 if (!bp_reply)
1696 goto no_image;
1697
1698 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1699 cur_screen, draw->ext->image,
1700 buffer);
1701 width = bp_reply->width;
1702 height = bp_reply->height;
1703 free(bp_reply);
1704 }
1705
1706 if (!buffer->image)
1707 goto no_image;
1708
1709 buffer->pixmap = pixmap;
1710 buffer->own_pixmap = false;
1711 buffer->width = width;
1712 buffer->height = height;
1713 buffer->shm_fence = shm_fence;
1714 buffer->sync_fence = sync_fence;
1715
1716 draw->buffers[buf_id] = buffer;
1717
1718 return buffer;
1719
1720 no_image:
1721 xcb_sync_destroy_fence(draw->conn, sync_fence);
1722 xshmfence_unmap_shm(shm_fence);
1723 no_fence:
1724 free(buffer);
1725 no_buffer:
1726 return NULL;
1727 }
1728
1729 /** dri3_get_buffer
1730 *
1731 * Find a front or back buffer, allocating new ones as necessary
1732 */
1733 static struct loader_dri3_buffer *
1734 dri3_get_buffer(__DRIdrawable *driDrawable,
1735 unsigned int format,
1736 enum loader_dri3_buffer_type buffer_type,
1737 struct loader_dri3_drawable *draw)
1738 {
1739 struct loader_dri3_buffer *buffer;
1740 bool fence_await = buffer_type == loader_dri3_buffer_back;
1741 int buf_id;
1742
1743 if (buffer_type == loader_dri3_buffer_back) {
1744 draw->back_format = format;
1745
1746 buf_id = dri3_find_back(draw);
1747
1748 if (buf_id < 0)
1749 return NULL;
1750 } else {
1751 buf_id = LOADER_DRI3_FRONT_ID;
1752 }
1753
1754 buffer = draw->buffers[buf_id];
1755
1756 /* Allocate a new buffer if there isn't an old one, if that
1757 * old one is the wrong size, or if it's suboptimal
1758 */
1759 if (!buffer || buffer->width != draw->width ||
1760 buffer->height != draw->height ||
1761 buffer->reallocate) {
1762 struct loader_dri3_buffer *new_buffer;
1763
1764 /* Allocate the new buffers
1765 */
1766 new_buffer = dri3_alloc_render_buffer(draw,
1767 format,
1768 draw->width,
1769 draw->height,
1770 draw->depth);
1771 if (!new_buffer)
1772 return NULL;
1773
1774 /* When resizing, copy the contents of the old buffer, waiting for that
1775 * copy to complete using our fences before proceeding
1776 */
1777 if ((buffer_type == loader_dri3_buffer_back ||
1778 (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
1779 && buffer) {
1780
1781 /* Fill the new buffer with data from an old buffer */
1782 if (!loader_dri3_blit_image(draw,
1783 new_buffer->image,
1784 buffer->image,
1785 0, 0, draw->width, draw->height,
1786 0, 0, 0) &&
1787 !buffer->linear_buffer) {
1788 dri3_fence_reset(draw->conn, new_buffer);
1789 dri3_copy_area(draw->conn,
1790 buffer->pixmap,
1791 new_buffer->pixmap,
1792 dri3_drawable_gc(draw),
1793 0, 0, 0, 0,
1794 draw->width, draw->height);
1795 dri3_fence_trigger(draw->conn, new_buffer);
1796 fence_await = true;
1797 }
1798 dri3_free_render_buffer(draw, buffer);
1799 } else if (buffer_type == loader_dri3_buffer_front) {
1800 /* Fill the new fake front with data from a real front */
1801 loader_dri3_swapbuffer_barrier(draw);
1802 dri3_fence_reset(draw->conn, new_buffer);
1803 dri3_copy_area(draw->conn,
1804 draw->drawable,
1805 new_buffer->pixmap,
1806 dri3_drawable_gc(draw),
1807 0, 0, 0, 0,
1808 draw->width, draw->height);
1809 dri3_fence_trigger(draw->conn, new_buffer);
1810
1811 if (new_buffer->linear_buffer) {
1812 dri3_fence_await(draw->conn, draw, new_buffer);
1813 (void) loader_dri3_blit_image(draw,
1814 new_buffer->image,
1815 new_buffer->linear_buffer,
1816 0, 0, draw->width, draw->height,
1817 0, 0, 0);
1818 } else
1819 fence_await = true;
1820 }
1821 buffer = new_buffer;
1822 draw->buffers[buf_id] = buffer;
1823 }
1824
1825 if (fence_await)
1826 dri3_fence_await(draw->conn, draw, buffer);
1827
1828 /*
1829 * Do we need to preserve the content of a previous buffer?
1830 *
1831 * Note that this blit is needed only to avoid a wait for a buffer that
1832 * is currently in the flip chain or being scanned out from. That's really
1833 * a tradeoff. If we're ok with the wait we can reduce the number of back
1834 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1835 * but in the latter case we must disallow page-flipping.
1836 */
1837 if (buffer_type == loader_dri3_buffer_back &&
1838 draw->cur_blit_source != -1 &&
1839 draw->buffers[draw->cur_blit_source] &&
1840 buffer != draw->buffers[draw->cur_blit_source]) {
1841
1842 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1843
1844       /* Avoid flushing here. That will probably help tiling hardware. */
1845 (void) loader_dri3_blit_image(draw,
1846 buffer->image,
1847 source->image,
1848 0, 0, draw->width, draw->height,
1849 0, 0, 0);
1850 buffer->last_swap = source->last_swap;
1851 draw->cur_blit_source = -1;
1852 }
1853 /* Return the requested buffer */
1854 return buffer;
1855 }
1856
1857 /** dri3_free_buffers
1858 *
1859  * Free the front buffer or all of the back buffers. Used
1860 * when the application changes which buffers it needs
1861 */
1862 static void
1863 dri3_free_buffers(__DRIdrawable *driDrawable,
1864 enum loader_dri3_buffer_type buffer_type,
1865 struct loader_dri3_drawable *draw)
1866 {
1867 struct loader_dri3_buffer *buffer;
1868 int first_id;
1869 int n_id;
1870 int buf_id;
1871
1872 switch (buffer_type) {
1873 case loader_dri3_buffer_back:
1874 first_id = LOADER_DRI3_BACK_ID(0);
1875 n_id = LOADER_DRI3_MAX_BACK;
1876 draw->cur_blit_source = -1;
1877 break;
1878 case loader_dri3_buffer_front:
1879 first_id = LOADER_DRI3_FRONT_ID;
1880 /* Don't free a fake front holding new backbuffer content. */
1881 n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
1882 }
1883
1884 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1885 buffer = draw->buffers[buf_id];
1886 if (buffer) {
1887 dri3_free_render_buffer(draw, buffer);
1888 draw->buffers[buf_id] = NULL;
1889 }
1890 }
1891 }
1892
1893 /** loader_dri3_get_buffers
1894 *
1895 * The published buffer allocation API.
1896 * Returns all of the necessary buffers, allocating
1897 * as needed.
1898 */
1899 int
1900 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1901 unsigned int format,
1902 uint32_t *stamp,
1903 void *loaderPrivate,
1904 uint32_t buffer_mask,
1905 struct __DRIimageList *buffers)
1906 {
1907 struct loader_dri3_drawable *draw = loaderPrivate;
1908 struct loader_dri3_buffer *front, *back;
1909 int buf_id;
1910
1911 buffers->image_mask = 0;
1912 buffers->front = NULL;
1913 buffers->back = NULL;
1914
1915 front = NULL;
1916 back = NULL;
1917
1918 if (!dri3_update_drawable(draw))
1919 return false;
1920
1921 dri3_update_num_back(draw);
1922
1923 /* Free no longer needed back buffers */
1924 for (buf_id = draw->num_back; buf_id < LOADER_DRI3_MAX_BACK; buf_id++) {
1925 if (draw->cur_blit_source != buf_id && draw->buffers[buf_id]) {
1926 dri3_free_render_buffer(draw, draw->buffers[buf_id]);
1927 draw->buffers[buf_id] = NULL;
1928 }
1929 }
1930
1931 /* pixmaps always have front buffers.
1932 * Exchange swaps also mandate fake front buffers.
1933 */
1934 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1935 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1936
1937 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1938 /* All pixmaps are owned by the server gpu.
1939 * When we use a different gpu, we can't use the pixmap
1940        * as a buffer since it is potentially tiled in a way
1941 * our device can't understand. In this case, use
1942 * a fake front buffer. Hopefully the pixmap
1943 * content will get synced with the fake front
1944 * buffer.
1945 */
1946 if (draw->is_pixmap && !draw->is_different_gpu)
1947 front = dri3_get_pixmap_buffer(driDrawable,
1948 format,
1949 loader_dri3_buffer_front,
1950 draw);
1951 else
1952 front = dri3_get_buffer(driDrawable,
1953 format,
1954 loader_dri3_buffer_front,
1955 draw);
1956
1957 if (!front)
1958 return false;
1959 } else {
1960 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1961 draw->have_fake_front = 0;
1962 }
1963
1964 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1965 back = dri3_get_buffer(driDrawable,
1966 format,
1967 loader_dri3_buffer_back,
1968 draw);
1969 if (!back)
1970 return false;
1971 draw->have_back = 1;
1972 } else {
1973 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1974 draw->have_back = 0;
1975 }
1976
1977 if (front) {
1978 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1979 buffers->front = front->image;
1980 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1981 }
1982
1983 if (back) {
1984 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1985 buffers->back = back->image;
1986 }
1987
1988 draw->stamp = stamp;
1989
1990 return true;
1991 }
1992
1993 /** loader_dri3_update_drawable_geometry
1994 *
1995 * Get the current drawable geometry.
1996 */
1997 void
1998 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1999 {
2000 xcb_get_geometry_cookie_t geom_cookie;
2001 xcb_get_geometry_reply_t *geom_reply;
2002
2003 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
2004
2005 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
2006
2007 if (geom_reply) {
2008 draw->width = geom_reply->width;
2009 draw->height = geom_reply->height;
2010 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
2011 draw->ext->flush->invalidate(draw->dri_drawable);
2012
2013 free(geom_reply);
2014 }
2015 }
2016
2017
2018 /**
2019 * Make sure the server has flushed all pending swap buffers to hardware
2020 * for this drawable. Ideally we'd want to send an X protocol request to
2021 * have the server block our connection until the swaps are complete. That
2022 * would avoid the potential round-trip here.
2023 */
2024 void
2025 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
2026 {
2027 int64_t ust, msc, sbc;
2028
2029 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
2030 }
2031
2032 /**
2033 * Perform any cleanup associated with a close screen operation.
2034 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
2035 *
2036 * This function destroys the screen's cached swap context if any.
2037 */
2038 void
2039 loader_dri3_close_screen(__DRIscreen *dri_screen)
2040 {
2041 mtx_lock(&blit_context.mtx);
2042 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
2043 blit_context.core->destroyContext(blit_context.ctx);
2044 blit_context.ctx = NULL;
2045 }
2046 mtx_unlock(&blit_context.mtx);
2047 }
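/*
 * Illustrative note ("psc" is a hypothetical caller struct): callers are
 * expected to invoke this from their screen-teardown path before the
 * __DRIscreen itself is destroyed, e.g.:
 *
 *    loader_dri3_close_screen(psc->dri_screen);
 *    psc->core->destroyScreen(psc->dri_screen);
 */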
2048
2049 /**
2050 * Find a backbuffer slot - potentially allocating a back buffer
2051 *
2052 * \param draw[in,out] Pointer to the drawable for which to find back.
2053 * \return Pointer to a new back buffer or NULL if allocation failed or was
2054 * not mandated.
2055 *
2056 * Find a potentially new back buffer, and if it's not been allocated yet and
2057 * in addition needs initializing, then try to allocate and initialize it.
2058 */
2060 static struct loader_dri3_buffer *
2061 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
2062 {
2063 struct loader_dri3_buffer *back;
2064 int id;
2065
2066 id = dri3_find_back(draw);
2067 if (id < 0)
2068 return NULL;
2069
2070 back = draw->buffers[id];
2071 /* Allocate a new back if we haven't got one */
2072 if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
2073 dri3_update_drawable(draw))
2074 back = dri3_alloc_render_buffer(draw, draw->back_format,
2075 draw->width, draw->height, draw->depth);
2076
2077 if (!back)
2078 return NULL;
2079
2080 draw->buffers[id] = back;
2081
2082 /* If necessary, prefill the back with data according to swap_method mode. */
2083 if (draw->cur_blit_source != -1 &&
2084 draw->buffers[draw->cur_blit_source] &&
2085 back != draw->buffers[draw->cur_blit_source]) {
2086 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
2087
2088 dri3_fence_await(draw->conn, draw, source);
2089 dri3_fence_await(draw->conn, draw, back);
2090 (void) loader_dri3_blit_image(draw,
2091 back->image,
2092 source->image,
2093 0, 0, draw->width, draw->height,
2094 0, 0, 0);
2095 back->last_swap = source->last_swap;
2096 draw->cur_blit_source = -1;
2097 }
2098
2099 return back;
2100 }