1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include "loader_dri3_helper.h"
36
37 /* From xmlpool/options.h, user-exposed so should be stable */
38 #define DRI_CONF_VBLANK_NEVER 0
39 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
41 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
42
43 /**
44 * A cached blit context.
45 */
46 struct loader_dri3_blit_context {
47 mtx_t mtx;
48 __DRIcontext *ctx;
49 __DRIscreen *cur_screen;
50 const __DRIcoreExtension *core;
51 };
52
53 /* For simplicity we maintain the cache only for a single screen at a time */
54 static struct loader_dri3_blit_context blit_context = {
55 _MTX_INITIALIZER_NP, NULL
56 };
57
58 static void
59 dri3_flush_present_events(struct loader_dri3_drawable *draw);
60
61 static struct loader_dri3_buffer *
62 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
63
64 /**
65 * Do we have blit functionality in the image blit extension?
66 *
67 * \param draw[in] The drawable intended to blit from / to.
68 * \return true if we have blit functionality. false otherwise.
69 */
70 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
71 {
72 return draw->ext->image->base.version >= 9 &&
73 draw->ext->image->blitImage != NULL;
74 }
75
76 /**
77 * Get and lock (for use with the current thread) a dri context associated
78 * with the drawable's dri screen. The context is intended to be used with
79 * the dri image extension's blitImage method.
80 *
81 * \param draw[in] Pointer to the drawable whose dri screen we want a
82 * dri context for.
83 * \return A dri context or NULL if context creation failed.
84 *
85 * When the caller is done with the context (even if the context returned was
86 * NULL), the caller must call loader_dri3_blit_context_put.
87 */
88 static __DRIcontext *
89 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
90 {
91 mtx_lock(&blit_context.mtx);
92
93 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
94 blit_context.core->destroyContext(blit_context.ctx);
95 blit_context.ctx = NULL;
96 }
97
98 if (!blit_context.ctx) {
99 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
100 NULL, NULL, NULL);
101 blit_context.cur_screen = draw->dri_screen;
102 blit_context.core = draw->ext->core;
103 }
104
105 return blit_context.ctx;
106 }
107
108 /**
109 * Release (for use with other threads) a dri context previously obtained using
110 * loader_dri3_blit_context_get.
111 */
112 static void
113 loader_dri3_blit_context_put(void)
114 {
115 mtx_unlock(&blit_context.mtx);
116 }
117
118 /**
119 * Blit (parts of) the contents of a DRI image to another DRI image.
120 *
121 * \param draw[in] The drawable which owns the images.
122 * \param dst[in] The destination image.
123 * \param src[in] The source image.
124 * \param dstx0[in] Start destination coordinate.
125 * \param dsty0[in] Start destination coordinate.
126 * \param width[in] Blit width.
127 * \param height[in] Blit height.
128 * \param srcx0[in] Start source coordinate.
129 * \param srcy0[in] Start source coordinate.
130 * \param flush_flag[in] Image blit flush flag.
131 * \return true iff successful.
132 */
133 static bool
134 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
135 __DRIimage *dst, __DRIimage *src,
136 int dstx0, int dsty0, int width, int height,
137 int srcx0, int srcy0, int flush_flag)
138 {
139 __DRIcontext *dri_context;
140 bool use_blit_context = false;
141
142 if (!loader_dri3_have_image_blit(draw))
143 return false;
144
145 dri_context = draw->vtable->get_dri_context(draw);
146
147 if (!dri_context || !draw->vtable->in_current_context(draw)) {
148 dri_context = loader_dri3_blit_context_get(draw);
149 use_blit_context = true;
150 flush_flag |= __BLIT_FLAG_FLUSH;
151 }
152
153 if (dri_context)
154 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
155 width, height, srcx0, srcy0,
156 width, height, flush_flag);
157
158 if (use_blit_context)
159 loader_dri3_blit_context_put();
160
161 return dri_context != NULL;
162 }
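/*
 * Example (illustrative, not part of the original file): a typical
 * full-surface call, matching how this file later refreshes the linear
 * copy of a back buffer on prime (different-gpu) setups:
 *
 *    (void) loader_dri3_blit_image(draw,
 *                                  back->linear_buffer, back->image,
 *                                  0, 0, back->width, back->height,
 *                                  0, 0, __BLIT_FLAG_FLUSH);
 */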
163
164 static inline void
165 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
166 {
167 xshmfence_reset(buffer->shm_fence);
168 }
169
170 static inline void
171 dri3_fence_set(struct loader_dri3_buffer *buffer)
172 {
173 xshmfence_trigger(buffer->shm_fence);
174 }
175
176 static inline void
177 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
178 {
179 xcb_sync_trigger_fence(c, buffer->sync_fence);
180 }
181
182 static inline void
183 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
184 struct loader_dri3_buffer *buffer)
185 {
186 xcb_flush(c);
187 xshmfence_await(buffer->shm_fence);
188 if (draw) {
189 mtx_lock(&draw->mtx);
190 dri3_flush_present_events(draw);
191 mtx_unlock(&draw->mtx);
192 }
193 }
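/*
 * Example (sketch of the usage pattern in this file): each buffer's fences
 * pair queued server-side work with a CPU-side wait. A buffer is reset
 * before the work is queued, a trigger is queued after it, and the client
 * awaits the fence before touching the buffer again:
 *
 *    dri3_fence_reset(draw->conn, buffer);       // mark the buffer busy
 *    ... queue server-side work (copy / present) ...
 *    dri3_fence_trigger(draw->conn, buffer);     // trigger after that work
 *    dri3_fence_await(draw->conn, draw, buffer); // block until triggered
 */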
194
195 static void
196 dri3_update_num_back(struct loader_dri3_drawable *draw)
197 {
198 if (draw->flipping)
199 draw->num_back = 3;
200 else
201 draw->num_back = 2;
202 }
203
204 void
205 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
206 {
207 draw->swap_interval = interval;
208 }
209
210 /** dri3_free_render_buffer
211 *
212 * Free everything associated with one render buffer, including the pixmap,
213 * the fences and the driver images.
214 */
215 static void
216 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
217 struct loader_dri3_buffer *buffer)
218 {
219 if (buffer->own_pixmap)
220 xcb_free_pixmap(draw->conn, buffer->pixmap);
221 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
222 xshmfence_unmap_shm(buffer->shm_fence);
223 draw->ext->image->destroyImage(buffer->image);
224 if (buffer->linear_buffer)
225 draw->ext->image->destroyImage(buffer->linear_buffer);
226 free(buffer);
227 }
228
229 void
230 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
231 {
232 int i;
233
234 draw->ext->core->destroyDrawable(draw->dri_drawable);
235
236 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
237 if (draw->buffers[i])
238 dri3_free_render_buffer(draw, draw->buffers[i]);
239 }
240
241 if (draw->special_event) {
242 xcb_void_cookie_t cookie =
243 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
244 XCB_PRESENT_EVENT_MASK_NO_EVENT);
245
246 xcb_discard_reply(draw->conn, cookie.sequence);
247 xcb_unregister_for_special_event(draw->conn, draw->special_event);
248 }
249
250 cnd_destroy(&draw->event_cnd);
251 mtx_destroy(&draw->mtx);
252 }
253
254 int
255 loader_dri3_drawable_init(xcb_connection_t *conn,
256 xcb_drawable_t drawable,
257 __DRIscreen *dri_screen,
258 bool is_different_gpu,
259 const __DRIconfig *dri_config,
260 struct loader_dri3_extensions *ext,
261 const struct loader_dri3_vtable *vtable,
262 struct loader_dri3_drawable *draw)
263 {
264 xcb_get_geometry_cookie_t cookie;
265 xcb_get_geometry_reply_t *reply;
266 xcb_generic_error_t *error;
267 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
268 int swap_interval;
269
270 draw->conn = conn;
271 draw->ext = ext;
272 draw->vtable = vtable;
273 draw->drawable = drawable;
274 draw->dri_screen = dri_screen;
275 draw->is_different_gpu = is_different_gpu;
276
277 draw->have_back = 0;
278 draw->have_fake_front = 0;
279 draw->first_init = true;
280
281 draw->cur_blit_source = -1;
282 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
283 mtx_init(&draw->mtx, mtx_plain);
284 cnd_init(&draw->event_cnd);
285
286 if (draw->ext->config)
287 draw->ext->config->configQueryi(draw->dri_screen,
288 "vblank_mode", &vblank_mode);
289
290 switch (vblank_mode) {
291 case DRI_CONF_VBLANK_NEVER:
292 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
293 swap_interval = 0;
294 break;
295 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
296 case DRI_CONF_VBLANK_ALWAYS_SYNC:
297 default:
298 swap_interval = 1;
299 break;
300 }
301 draw->swap_interval = swap_interval;
302
303 dri3_update_num_back(draw);
304
305 /* Create a new drawable */
306 draw->dri_drawable =
307 draw->ext->image_driver->createNewDrawable(dri_screen,
308 dri_config,
309 draw);
310
311 if (!draw->dri_drawable)
312 return 1;
313
314 cookie = xcb_get_geometry(draw->conn, draw->drawable);
315 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
316 if (reply == NULL || error != NULL) {
317 draw->ext->core->destroyDrawable(draw->dri_drawable);
318 return 1;
319 }
320
321 draw->width = reply->width;
322 draw->height = reply->height;
323 draw->depth = reply->depth;
324 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
325 free(reply);
326
327 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
328 if (draw->ext->core->base.version >= 2) {
329 (void) draw->ext->core->getConfigAttrib(dri_config,
330 __DRI_ATTRIB_SWAP_METHOD,
331 &draw->swap_method);
332 }
333
334 /*
335 * Make sure server has the same swap interval we do for the new
336 * drawable.
337 */
338 loader_dri3_set_swap_interval(draw, swap_interval);
339
340 return 0;
341 }
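/*
 * Example (hypothetical caller, illustrative only): a GLX or EGL platform
 * layer embeds struct loader_dri3_drawable in its own drawable object and
 * initializes it roughly like this; conn, window, dri_screen, dri_config,
 * ext, vtable and priv are the caller's own objects:
 *
 *    if (loader_dri3_drawable_init(conn, window, dri_screen,
 *                                  false, // same gpu
 *                                  dri_config, &ext, &vtable,
 *                                  &priv->loader_drawable)) {
 *       // non-zero return: DRI drawable creation or geometry query failed
 *    }
 */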
342
343 /*
344 * Process one Present event
345 */
346 static void
347 dri3_handle_present_event(struct loader_dri3_drawable *draw,
348 xcb_present_generic_event_t *ge)
349 {
350 switch (ge->evtype) {
351 case XCB_PRESENT_CONFIGURE_NOTIFY: {
352 xcb_present_configure_notify_event_t *ce = (void *) ge;
353
354 draw->width = ce->width;
355 draw->height = ce->height;
356 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
357 draw->ext->flush->invalidate(draw->dri_drawable);
358 break;
359 }
360 case XCB_PRESENT_COMPLETE_NOTIFY: {
361 xcb_present_complete_notify_event_t *ce = (void *) ge;
362
363 /* Compute the processed SBC number from the received 32-bit serial number
364 * merged with the upper 32 bits of the sent 64-bit serial number, while
365 * checking for wrap.
366 */
367 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
368 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
369 if (draw->recv_sbc > draw->send_sbc)
370 draw->recv_sbc -= 0x100000000;
371 switch (ce->mode) {
372 case XCB_PRESENT_COMPLETE_MODE_FLIP:
373 draw->flipping = true;
374 break;
375 case XCB_PRESENT_COMPLETE_MODE_COPY:
376 draw->flipping = false;
377 break;
378 }
379
380 if (draw->vtable->show_fps)
381 draw->vtable->show_fps(draw, ce->ust);
382
383 draw->ust = ce->ust;
384 draw->msc = ce->msc;
385 } else if (ce->serial == draw->eid) {
386 draw->notify_ust = ce->ust;
387 draw->notify_msc = ce->msc;
388 }
389 break;
390 }
391 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
392 xcb_present_idle_notify_event_t *ie = (void *) ge;
393 int b;
394
395 for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
396 struct loader_dri3_buffer *buf = draw->buffers[b];
397
398 if (buf && buf->pixmap == ie->pixmap)
399 buf->busy = 0;
400
401 if (buf && draw->num_back <= b && b < LOADER_DRI3_MAX_BACK &&
402 draw->cur_blit_source != b &&
403 !buf->busy) {
404 dri3_free_render_buffer(draw, buf);
405 draw->buffers[b] = NULL;
406 }
407 }
408 break;
409 }
410 }
411 free(ge);
412 }
413
414 static bool
415 dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
416 {
417 xcb_generic_event_t *ev;
418 xcb_present_generic_event_t *ge;
419
420 xcb_flush(draw->conn);
421
422 /* Only have one thread waiting for events at a time */
423 if (draw->has_event_waiter) {
424 cnd_wait(&draw->event_cnd, &draw->mtx);
425 /* Another thread has updated the protected info, so retest. */
426 return true;
427 } else {
428 draw->has_event_waiter = true;
429 /* Allow other threads access to the drawable while we're waiting. */
430 mtx_unlock(&draw->mtx);
431 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
432 mtx_lock(&draw->mtx);
433 draw->has_event_waiter = false;
434 cnd_broadcast(&draw->event_cnd);
435 }
436 if (!ev)
437 return false;
438 ge = (void *) ev;
439 dri3_handle_present_event(draw, ge);
440 return true;
441 }
442
443 /** loader_dri3_wait_for_msc
444 *
445 * Get the X server to send an event when the target msc/divisor/remainder is
446 * reached.
447 */
448 bool
449 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
450 int64_t target_msc,
451 int64_t divisor, int64_t remainder,
452 int64_t *ust, int64_t *msc, int64_t *sbc)
453 {
454 xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
455 draw->drawable,
456 draw->eid,
457 target_msc,
458 divisor,
459 remainder);
460 xcb_generic_event_t *ev;
461 unsigned full_sequence;
462
463 mtx_lock(&draw->mtx);
464 xcb_flush(draw->conn);
465
466 /* Wait for the event */
467 do {
468 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
469 if (!ev) {
470 mtx_unlock(&draw->mtx);
471 return false;
472 }
473
474 full_sequence = ev->full_sequence;
475 dri3_handle_present_event(draw, (void *) ev);
476 } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
477
478 *ust = draw->notify_ust;
479 *msc = draw->notify_msc;
480 *sbc = draw->recv_sbc;
481 mtx_unlock(&draw->mtx);
482
483 return true;
484 }
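/*
 * Example (illustrative): waiting for the next MSC, roughly the
 * glXWaitForMscOML(dpy, drawable, 0, 1, 0, ...) case:
 *
 *    int64_t ust, msc, sbc;
 *    if (!loader_dri3_wait_for_msc(draw, 0, 1, 0, &ust, &msc, &sbc))
 *       return; // connection error while waiting for the event
 */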
485
486 /** loader_dri3_wait_for_sbc
487 *
488 * Wait for the completed swap buffer count to reach the specified
489 * target. Presumably the application knows that this will be reached with
490 * outstanding complete events, or we're going to be here a while.
491 */
492 int
493 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
494 int64_t target_sbc, int64_t *ust,
495 int64_t *msc, int64_t *sbc)
496 {
497 /* From the GLX_OML_sync_control spec:
498 *
499 * "If <target_sbc> = 0, the function will block until all previous
500 * swaps requested with glXSwapBuffersMscOML for that window have
501 * completed."
502 */
503 mtx_lock(&draw->mtx);
504 if (!target_sbc)
505 target_sbc = draw->send_sbc;
506
507 while (draw->recv_sbc < target_sbc) {
508 if (!dri3_wait_for_event_locked(draw)) {
509 mtx_unlock(&draw->mtx);
510 return 0;
511 }
512 }
513
514 *ust = draw->ust;
515 *msc = draw->msc;
516 *sbc = draw->recv_sbc;
517 mtx_unlock(&draw->mtx);
518 return 1;
519 }
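/*
 * Example (taken from this file's own usage): passing target_sbc == 0
 * waits for all previously requested swaps, which is how
 * loader_dri3_swapbuffer_barrier() below implements its barrier:
 *
 *    int64_t ust, msc, sbc;
 *    (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
 */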
520
521 /** dri3_find_back
522 *
523 * Find an idle back buffer. If there isn't one, then
524 * wait for a present idle notify event from the X server
525 */
526 static int
527 dri3_find_back(struct loader_dri3_drawable *draw)
528 {
529 int b;
530 int num_to_consider;
531
532 mtx_lock(&draw->mtx);
533 /* Increase the likelihood of reusing the current buffer */
534 dri3_flush_present_events(draw);
535
536 /* Check whether we need to reuse the current back buffer as new back.
537 * In that case, wait until it's not busy anymore.
538 */
539 dri3_update_num_back(draw);
540 num_to_consider = draw->num_back;
541 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
542 num_to_consider = 1;
543 draw->cur_blit_source = -1;
544 }
545
546 for (;;) {
547 for (b = 0; b < num_to_consider; b++) {
548 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
549 struct loader_dri3_buffer *buffer = draw->buffers[id];
550
551 if (!buffer || !buffer->busy) {
552 draw->cur_back = id;
553 mtx_unlock(&draw->mtx);
554 return id;
555 }
556 }
557 if (!dri3_wait_for_event_locked(draw)) {
558 mtx_unlock(&draw->mtx);
559 return -1;
560 }
561 }
562 }
563
564 static xcb_gcontext_t
565 dri3_drawable_gc(struct loader_dri3_drawable *draw)
566 {
567 if (!draw->gc) {
568 uint32_t v = 0;
569 xcb_create_gc(draw->conn,
570 (draw->gc = xcb_generate_id(draw->conn)),
571 draw->drawable,
572 XCB_GC_GRAPHICS_EXPOSURES,
573 &v);
574 }
575 return draw->gc;
576 }
577
578
579 static struct loader_dri3_buffer *
580 dri3_back_buffer(struct loader_dri3_drawable *draw)
581 {
582 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
583 }
584
585 static struct loader_dri3_buffer *
586 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
587 {
588 return draw->buffers[LOADER_DRI3_FRONT_ID];
589 }
590
591 static void
592 dri3_copy_area(xcb_connection_t *c,
593 xcb_drawable_t src_drawable,
594 xcb_drawable_t dst_drawable,
595 xcb_gcontext_t gc,
596 int16_t src_x,
597 int16_t src_y,
598 int16_t dst_x,
599 int16_t dst_y,
600 uint16_t width,
601 uint16_t height)
602 {
603 xcb_void_cookie_t cookie;
604
605 cookie = xcb_copy_area_checked(c,
606 src_drawable,
607 dst_drawable,
608 gc,
609 src_x,
610 src_y,
611 dst_x,
612 dst_y,
613 width,
614 height);
615 xcb_discard_reply(c, cookie.sequence);
616 }
617
618 /**
619 * Asks the driver to flush any queued work necessary for serializing with the
620 * X command stream, and optionally the slightly more strict requirement of
621 * glFlush() equivalence (which would require flushing even if nothing had
622 * been drawn to a window system framebuffer, for example).
623 */
624 void
625 loader_dri3_flush(struct loader_dri3_drawable *draw,
626 unsigned flags,
627 enum __DRI2throttleReason throttle_reason)
628 {
629 /* The context may be NULL when no context is current; skip the flush then. */
630 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
631
632 if (dri_context) {
633 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
634 flags, throttle_reason);
635 }
636 }
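/*
 * Example (as used by loader_dri3_copy_sub_buffer() below): flush the
 * drawable, adding __DRI2_FLUSH_CONTEXT when glFlush() semantics are wanted:
 *
 *    loader_dri3_flush(draw,
 *                      __DRI2_FLUSH_DRAWABLE | __DRI2_FLUSH_CONTEXT,
 *                      __DRI2_THROTTLE_SWAPBUFFER);
 */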
637
638 void
639 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
640 int x, int y,
641 int width, int height,
642 bool flush)
643 {
644 struct loader_dri3_buffer *back;
645 unsigned flags = __DRI2_FLUSH_DRAWABLE;
646
647 /* Check we have the right attachments */
648 if (!draw->have_back || draw->is_pixmap)
649 return;
650
651 if (flush)
652 flags |= __DRI2_FLUSH_CONTEXT;
653 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
654
655 back = dri3_find_back_alloc(draw);
656 if (!back)
657 return;
658
659 y = draw->height - y - height;
660
661 if (draw->is_different_gpu) {
662 /* Update the linear buffer part of the back buffer
663 * for the dri3_copy_area operation
664 */
665 (void) loader_dri3_blit_image(draw,
666 back->linear_buffer,
667 back->image,
668 0, 0, back->width, back->height,
669 0, 0, __BLIT_FLAG_FLUSH);
670 }
671
672 loader_dri3_swapbuffer_barrier(draw);
673 dri3_fence_reset(draw->conn, back);
674 dri3_copy_area(draw->conn,
675 back->pixmap,
676 draw->drawable,
677 dri3_drawable_gc(draw),
678 x, y, x, y, width, height);
679 dri3_fence_trigger(draw->conn, back);
680 /* Refresh the fake front (if present) after we just damaged the real
681 * front.
682 */
683 if (draw->have_fake_front &&
684 !loader_dri3_blit_image(draw,
685 dri3_fake_front_buffer(draw)->image,
686 back->image,
687 x, y, width, height,
688 x, y, __BLIT_FLAG_FLUSH) &&
689 !draw->is_different_gpu) {
690 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
691 dri3_copy_area(draw->conn,
692 back->pixmap,
693 dri3_fake_front_buffer(draw)->pixmap,
694 dri3_drawable_gc(draw),
695 x, y, x, y, width, height);
696 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
697 dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
698 }
699 dri3_fence_await(draw->conn, draw, back);
700 }
701
702 void
703 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
704 xcb_drawable_t dest,
705 xcb_drawable_t src)
706 {
707 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
708
709 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
710 dri3_copy_area(draw->conn,
711 src, dest,
712 dri3_drawable_gc(draw),
713 0, 0, 0, 0, draw->width, draw->height);
714 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
715 dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
716 }
717
718 void
719 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
720 {
721 struct loader_dri3_buffer *front;
722
723 if (draw == NULL || !draw->have_fake_front)
724 return;
725
726 front = dri3_fake_front_buffer(draw);
727
728 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
729
730 /* In the psc->is_different_gpu case, the linear buffer has been updated,
731 * but not yet the tiled buffer.
732 * Copy back to the tiled buffer we use for rendering.
733 * Note that we don't need flushing.
734 */
735 if (draw->is_different_gpu)
736 (void) loader_dri3_blit_image(draw,
737 front->image,
738 front->linear_buffer,
739 0, 0, front->width, front->height,
740 0, 0, 0);
741 }
742
743 void
744 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
745 {
746 struct loader_dri3_buffer *front;
747
748 if (draw == NULL || !draw->have_fake_front)
749 return;
750
751 front = dri3_fake_front_buffer(draw);
752
753 /* In the psc->is_different_gpu case, we update the linear_buffer
754 * before updating the real front.
755 */
756 if (draw->is_different_gpu)
757 (void) loader_dri3_blit_image(draw,
758 front->linear_buffer,
759 front->image,
760 0, 0, front->width, front->height,
761 0, 0, __BLIT_FLAG_FLUSH);
762 loader_dri3_swapbuffer_barrier(draw);
763 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
764 }
765
766 /** dri3_flush_present_events
767 *
768 * Process any present events that have been received from the X server
769 */
770 static void
771 dri3_flush_present_events(struct loader_dri3_drawable *draw)
772 {
773 /* Check to see if any configuration changes have occurred
774 * since we were last invoked
775 */
776 if (draw->has_event_waiter)
777 return;
778
779 if (draw->special_event) {
780 xcb_generic_event_t *ev;
781
782 while ((ev = xcb_poll_for_special_event(draw->conn,
783 draw->special_event)) != NULL) {
784 xcb_present_generic_event_t *ge = (void *) ev;
785 dri3_handle_present_event(draw, ge);
786 }
787 }
788 }
789
790 /** loader_dri3_swap_buffers_msc
791 *
792 * Make the current back buffer visible using the present extension
793 */
794 int64_t
795 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
796 int64_t target_msc, int64_t divisor,
797 int64_t remainder, unsigned flush_flags,
798 bool force_copy)
799 {
800 struct loader_dri3_buffer *back;
801 int64_t ret = 0;
802 uint32_t options = XCB_PRESENT_OPTION_NONE;
803
804 draw->vtable->flush_drawable(draw, flush_flags);
805
806 back = dri3_find_back_alloc(draw);
807
808 mtx_lock(&draw->mtx);
809 if (draw->is_different_gpu && back) {
810 /* Update the linear buffer before presenting the pixmap */
811 (void) loader_dri3_blit_image(draw,
812 back->linear_buffer,
813 back->image,
814 0, 0, back->width, back->height,
815 0, 0, __BLIT_FLAG_FLUSH);
816 }
817
818 /* If we need to preload the new back buffer, remember the source.
819 * The force_copy parameter is used by EGL to attempt to preserve
820 * the back buffer across a call to this function.
821 */
822 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
823 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
824
825 /* Exchange the back and fake front. Even though the server knows about these
826 * buffers, it has no notion of back and fake front.
827 */
828 if (back && draw->have_fake_front) {
829 struct loader_dri3_buffer *tmp;
830
831 tmp = dri3_fake_front_buffer(draw);
832 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
833 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
834
835 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
836 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
837 }
838
839 dri3_flush_present_events(draw);
840
841 if (back && !draw->is_pixmap) {
842 dri3_fence_reset(draw->conn, back);
843
844 /* Compute when we want the frame shown by taking the last known
845 * successful MSC and adding in a swap interval for each outstanding swap
846 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
847 * semantic"
848 */
849 ++draw->send_sbc;
850 if (target_msc == 0 && divisor == 0 && remainder == 0)
851 target_msc = draw->msc + draw->swap_interval *
852 (draw->send_sbc - draw->recv_sbc);
853 else if (divisor == 0 && remainder > 0) {
854 /* From the GLX_OML_sync_control spec:
855 * "If <divisor> = 0, the swap will occur when MSC becomes
856 * greater than or equal to <target_msc>."
857 *
858 * Note that there's no mention of the remainder. The Present
859 * extension throws BadValue for remainder != 0 with divisor == 0, so
860 * just drop the passed in value.
861 */
862 remainder = 0;
863 }
864
865 /* From the GLX_EXT_swap_control spec
866 * and the EGL 1.4 spec (page 53):
867 *
868 * "If <interval> is set to a value of 0, buffer swaps are not
869 * synchronized to a video frame."
870 *
871 * Implementation note: It is possible to enable triple buffering
872 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
873 * the default.
874 */
875 if (draw->swap_interval == 0)
876 options |= XCB_PRESENT_OPTION_ASYNC;
877
878 /* If we need to populate the new back, but need to reuse the back
879 * buffer slot due to lack of local blit capabilities, make sure
880 * the server doesn't flip and we deadlock.
881 */
882 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
883 options |= XCB_PRESENT_OPTION_COPY;
884
885 back->busy = 1;
886 back->last_swap = draw->send_sbc;
887 xcb_present_pixmap(draw->conn,
888 draw->drawable,
889 back->pixmap,
890 (uint32_t) draw->send_sbc,
891 0, /* valid */
892 0, /* update */
893 0, /* x_off */
894 0, /* y_off */
895 None, /* target_crtc */
896 None,
897 back->sync_fence,
898 options,
899 target_msc,
900 divisor,
901 remainder, 0, NULL);
902 ret = (int64_t) draw->send_sbc;
903
904 /* Schedule a server-side back-preserving blit if necessary.
905 * This happens iff all conditions below are satisfied:
906 * a) We have a fake front,
907 * b) We need to preserve the back buffer,
908 * c) We don't have local blit capabilities.
909 */
910 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
911 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
912 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
913 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
914
915 dri3_fence_reset(draw->conn, new_back);
916 dri3_copy_area(draw->conn, src->pixmap,
917 new_back->pixmap,
918 dri3_drawable_gc(draw),
919 0, 0, 0, 0, draw->width, draw->height);
920 dri3_fence_trigger(draw->conn, new_back);
921 new_back->last_swap = src->last_swap;
922 }
923
924 xcb_flush(draw->conn);
925 if (draw->stamp)
926 ++(*draw->stamp);
927 }
928 mtx_unlock(&draw->mtx);
929
930 draw->ext->flush->invalidate(draw->dri_drawable);
931
932 return ret;
933 }
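/*
 * Example (illustrative): plain glXSwapBuffers()/eglSwapBuffers() behaviour
 * corresponds to target_msc = divisor = remainder = 0, with flush_flags
 * chosen by the caller:
 *
 *    int64_t sbc = loader_dri3_swap_buffers_msc(draw, 0, 0, 0,
 *                                               __DRI2_FLUSH_DRAWABLE |
 *                                               __DRI2_FLUSH_CONTEXT,
 *                                               false);
 */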
934
935 int
936 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
937 {
938 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
939 int ret;
940
941 mtx_lock(&draw->mtx);
942 ret = (!back || back->last_swap == 0) ? 0 :
943 draw->send_sbc - back->last_swap + 1;
944 mtx_unlock(&draw->mtx);
945
946 return ret;
947 }
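/*
 * Example (illustrative, GLX_EXT_buffer_age / EGL_EXT_buffer_age style):
 * an age of 0 means the back buffer contents are undefined, 1 means they
 * are the contents left by the previous swap, and so on:
 *
 *    int age = loader_dri3_query_buffer_age(draw);
 *    if (age == 0)
 *       // repaint the whole buffer
 */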
948
949 /** loader_dri3_open
950 *
951 * Wrapper around xcb_dri3_open
952 */
953 int
954 loader_dri3_open(xcb_connection_t *conn,
955 xcb_window_t root,
956 uint32_t provider)
957 {
958 xcb_dri3_open_cookie_t cookie;
959 xcb_dri3_open_reply_t *reply;
960 int fd;
961
962 cookie = xcb_dri3_open(conn,
963 root,
964 provider);
965
966 reply = xcb_dri3_open_reply(conn, cookie, NULL);
967 if (!reply)
968 return -1;
969
970 if (reply->nfd != 1) {
971 free(reply);
972 return -1;
973 }
974
975 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
976 free(reply);
977 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
978
979 return fd;
980 }
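/*
 * Example (illustrative): opening a DRM fd for the default provider of a
 * screen; "screen" is a hypothetical xcb_screen_t pointer:
 *
 *    int fd = loader_dri3_open(conn, screen->root, 0); // 0: default provider
 *    if (fd < 0)
 *       // DRI3 is unavailable or the open failed
 */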
981
982 static uint32_t
983 dri3_cpp_for_format(uint32_t format) {
984 switch (format) {
985 case __DRI_IMAGE_FORMAT_R8:
986 return 1;
987 case __DRI_IMAGE_FORMAT_RGB565:
988 case __DRI_IMAGE_FORMAT_GR88:
989 return 2;
990 case __DRI_IMAGE_FORMAT_XRGB8888:
991 case __DRI_IMAGE_FORMAT_ARGB8888:
992 case __DRI_IMAGE_FORMAT_ABGR8888:
993 case __DRI_IMAGE_FORMAT_XBGR8888:
994 case __DRI_IMAGE_FORMAT_XRGB2101010:
995 case __DRI_IMAGE_FORMAT_ARGB2101010:
996 case __DRI_IMAGE_FORMAT_XBGR2101010:
997 case __DRI_IMAGE_FORMAT_ABGR2101010:
998 case __DRI_IMAGE_FORMAT_SARGB8:
999 return 4;
1000 case __DRI_IMAGE_FORMAT_NONE:
1001 default:
1002 return 0;
1003 }
1004 }
1005
1006 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1007 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1008 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1009 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1010 */
1011 static int
1012 image_format_to_fourcc(int format)
1013 {
1014
1015 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1016 switch (format) {
1017 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1018 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1019 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1020 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1021 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1022 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1023 case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
1024 case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
1025 case __DRI_IMAGE_FORMAT_XBGR2101010: return __DRI_IMAGE_FOURCC_XBGR2101010;
1026 case __DRI_IMAGE_FORMAT_ABGR2101010: return __DRI_IMAGE_FOURCC_ABGR2101010;
1027 }
1028 return 0;
1029 }
1030
1031 /** dri3_alloc_render_buffer
1032 *
1033 * Use the driver's createImage function to construct a __DRIimage, then
1034 * get a file descriptor for it and create an X pixmap from that fd.
1035 *
1036 * Allocate an xshmfence for synchronization
1037 */
1038 static struct loader_dri3_buffer *
1039 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
1040 int width, int height, int depth)
1041 {
1042 struct loader_dri3_buffer *buffer;
1043 __DRIimage *pixmap_buffer;
1044 xcb_pixmap_t pixmap;
1045 xcb_sync_fence_t sync_fence;
1046 struct xshmfence *shm_fence;
1047 int buffer_fd, fence_fd;
1048 int stride;
1049
1050 /* Create an xshmfence object and
1051 * prepare to send that to the X server
1052 */
1053
1054 fence_fd = xshmfence_alloc_shm();
1055 if (fence_fd < 0)
1056 return NULL;
1057
1058 shm_fence = xshmfence_map_shm(fence_fd);
1059 if (shm_fence == NULL)
1060 goto no_shm_fence;
1061
1062 /* Allocate the image from the driver
1063 */
1064 buffer = calloc(1, sizeof *buffer);
1065 if (!buffer)
1066 goto no_buffer;
1067
1068 buffer->cpp = dri3_cpp_for_format(format);
1069 if (!buffer->cpp)
1070 goto no_image;
1071
1072 if (!draw->is_different_gpu) {
1073 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1074 width, height,
1075 format,
1076 __DRI_IMAGE_USE_SHARE |
1077 __DRI_IMAGE_USE_SCANOUT |
1078 __DRI_IMAGE_USE_BACKBUFFER,
1079 buffer);
1080 pixmap_buffer = buffer->image;
1081
1082 if (!buffer->image)
1083 goto no_image;
1084 } else {
1085 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1086 width, height,
1087 format,
1088 0,
1089 buffer);
1090
1091 if (!buffer->image)
1092 goto no_image;
1093
1094 buffer->linear_buffer =
1095 draw->ext->image->createImage(draw->dri_screen,
1096 width, height, format,
1097 __DRI_IMAGE_USE_SHARE |
1098 __DRI_IMAGE_USE_LINEAR |
1099 __DRI_IMAGE_USE_BACKBUFFER,
1100 buffer);
1101 pixmap_buffer = buffer->linear_buffer;
1102
1103 if (!buffer->linear_buffer)
1104 goto no_linear_buffer;
1105 }
1106
1107 /* X wants the stride, so ask the image for it
1108 */
1109 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
1110 &stride))
1111 goto no_buffer_attrib;
1112
1113 buffer->pitch = stride;
1114
1115 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
1116 &buffer_fd))
1117 goto no_buffer_attrib;
1118
1119 xcb_dri3_pixmap_from_buffer(draw->conn,
1120 (pixmap = xcb_generate_id(draw->conn)),
1121 draw->drawable,
1122 buffer->size,
1123 width, height, buffer->pitch,
1124 depth, buffer->cpp * 8,
1125 buffer_fd);
1126
1127 xcb_dri3_fence_from_fd(draw->conn,
1128 pixmap,
1129 (sync_fence = xcb_generate_id(draw->conn)),
1130 false,
1131 fence_fd);
1132
1133 buffer->pixmap = pixmap;
1134 buffer->own_pixmap = true;
1135 buffer->sync_fence = sync_fence;
1136 buffer->shm_fence = shm_fence;
1137 buffer->width = width;
1138 buffer->height = height;
1139
1140 /* Mark the buffer as idle
1141 */
1142 dri3_fence_set(buffer);
1143
1144 return buffer;
1145
1146 no_buffer_attrib:
1147 draw->ext->image->destroyImage(pixmap_buffer);
1148 no_linear_buffer:
1149 if (draw->is_different_gpu)
1150 draw->ext->image->destroyImage(buffer->image);
1151 no_image:
1152 free(buffer);
1153 no_buffer:
1154 xshmfence_unmap_shm(shm_fence);
1155 no_shm_fence:
1156 close(fence_fd);
1157 return NULL;
1158 }
1159
1160 /** dri3_update_drawable
1161 *
1162 * Called the first time we use the drawable and then
1163 * after we receive present configure notify events to
1164 * track the geometry of the drawable
1165 */
1166 static int
1167 dri3_update_drawable(__DRIdrawable *driDrawable,
1168 struct loader_dri3_drawable *draw)
1169 {
1170 mtx_lock(&draw->mtx);
1171 if (draw->first_init) {
1172 xcb_get_geometry_cookie_t geom_cookie;
1173 xcb_get_geometry_reply_t *geom_reply;
1174 xcb_void_cookie_t cookie;
1175 xcb_generic_error_t *error;
1176 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1177 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1178
1179 draw->first_init = false;
1180
1181 /* Try to select for input on the window.
1182 *
1183 * If the drawable is a window, this will get our events
1184 * delivered.
1185 *
1186 * Otherwise, we'll get a BadWindow error back from this request which
1187 * will let us know that the drawable is a pixmap instead.
1188 */
1189
1190 draw->eid = xcb_generate_id(draw->conn);
1191 cookie =
1192 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1193 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1194 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1195 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1196
1197 present_capabilities_cookie =
1198 xcb_present_query_capabilities(draw->conn, draw->drawable);
1199
1200 /* Create an XCB event queue to hold present events outside of the usual
1201 * application event queue
1202 */
1203 draw->special_event = xcb_register_for_special_xge(draw->conn,
1204 &xcb_present_id,
1205 draw->eid,
1206 draw->stamp);
1207 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1208
1209 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1210
1211 if (!geom_reply) {
1212 mtx_unlock(&draw->mtx);
1213 return false;
1214 }
1215
1216 draw->width = geom_reply->width;
1217 draw->height = geom_reply->height;
1218 draw->depth = geom_reply->depth;
1219 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1220
1221 free(geom_reply);
1222
1223 draw->is_pixmap = false;
1224
1225 /* Check to see if our select input call failed. If it failed with a
1226 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1227 * special event queue created above and mark the drawable as a pixmap
1228 */
1229
1230 error = xcb_request_check(draw->conn, cookie);
1231
1232 present_capabilities_reply =
1233 xcb_present_query_capabilities_reply(draw->conn,
1234 present_capabilities_cookie,
1235 NULL);
1236
1237 if (present_capabilities_reply) {
1238 draw->present_capabilities = present_capabilities_reply->capabilities;
1239 free(present_capabilities_reply);
1240 } else
1241 draw->present_capabilities = 0;
1242
1243 if (error) {
1244 if (error->error_code != BadWindow) {
1245 free(error);
1246 mtx_unlock(&draw->mtx);
1247 return false;
1248 }
1249 draw->is_pixmap = true;
1250 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1251 draw->special_event = NULL;
1252 }
1253 }
1254 dri3_flush_present_events(draw);
1255 mtx_unlock(&draw->mtx);
1256 return true;
1257 }
1258
1259 __DRIimage *
1260 loader_dri3_create_image(xcb_connection_t *c,
1261 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1262 unsigned int format,
1263 __DRIscreen *dri_screen,
1264 const __DRIimageExtension *image,
1265 void *loaderPrivate)
1266 {
1267 int *fds;
1268 __DRIimage *image_planar, *ret;
1269 int stride, offset;
1270
1271 /* Get an FD for the pixmap object
1272 */
1273 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1274
1275 stride = bp_reply->stride;
1276 offset = 0;
1277
1278 /* createImageFromFds creates a wrapper __DRIimage structure which
1279 * can deal with multiple planes for things like Yuv images. So, once
1280 * we've gotten the planar wrapper, pull the single plane out of it and
1281 * discard the wrapper.
1282 */
1283 image_planar = image->createImageFromFds(dri_screen,
1284 bp_reply->width,
1285 bp_reply->height,
1286 image_format_to_fourcc(format),
1287 fds, 1,
1288 &stride, &offset, loaderPrivate);
1289 close(fds[0]);
1290 if (!image_planar)
1291 return NULL;
1292
1293 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1294
1295 if (!ret)
1296 ret = image_planar;
1297 else
1298 image->destroyImage(image_planar);
1299
1300 return ret;
1301 }
1302
1303 /** dri3_get_pixmap_buffer
1304 *
1305 * Get the DRM object for a pixmap from the X server and
1306 * wrap that with a __DRIimage structure using createImageFromFds
1307 */
1308 static struct loader_dri3_buffer *
1309 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1310 enum loader_dri3_buffer_type buffer_type,
1311 struct loader_dri3_drawable *draw)
1312 {
1313 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1314 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1315 xcb_drawable_t pixmap;
1316 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1317 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1318 xcb_sync_fence_t sync_fence;
1319 struct xshmfence *shm_fence;
1320 int fence_fd;
1321 __DRIscreen *cur_screen;
1322
1323 if (buffer)
1324 return buffer;
1325
1326 pixmap = draw->drawable;
1327
1328 buffer = calloc(1, sizeof *buffer);
1329 if (!buffer)
1330 goto no_buffer;
1331
1332 fence_fd = xshmfence_alloc_shm();
1333 if (fence_fd < 0)
1334 goto no_fence;
1335 shm_fence = xshmfence_map_shm(fence_fd);
1336 if (shm_fence == NULL) {
1337 close (fence_fd);
1338 goto no_fence;
1339 }
1340
1341 xcb_dri3_fence_from_fd(draw->conn,
1342 pixmap,
1343 (sync_fence = xcb_generate_id(draw->conn)),
1344 false,
1345 fence_fd);
1346
1347 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1348 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1349 if (!bp_reply)
1350 goto no_image;
1351
1352 /* Get the currently-bound screen or revert to using the drawable's screen if
1353 * no contexts are currently bound. The latter case is at least necessary for
1354 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1355 */
1356 cur_screen = draw->vtable->get_dri_screen();
1357 if (!cur_screen) {
1358 cur_screen = draw->dri_screen;
1359 }
1360
1361 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1362 cur_screen, draw->ext->image,
1363 buffer);
1364 if (!buffer->image)
1365 goto no_image;
1366
1367 buffer->pixmap = pixmap;
1368 buffer->own_pixmap = false;
1369 buffer->width = bp_reply->width;
1370 buffer->height = bp_reply->height;
1371 buffer->shm_fence = shm_fence;
1372 buffer->sync_fence = sync_fence;
1373
1374 draw->buffers[buf_id] = buffer;
1375
1376 free(bp_reply);
1377
1378 return buffer;
1379
1380 no_image:
1381 free(bp_reply);
1382 xcb_sync_destroy_fence(draw->conn, sync_fence);
1383 xshmfence_unmap_shm(shm_fence);
1384 no_fence:
1385 free(buffer);
1386 no_buffer:
1387 return NULL;
1388 }
1389
1390 /** dri3_get_buffer
1391 *
1392 * Find a front or back buffer, allocating new ones as necessary
1393 */
1394 static struct loader_dri3_buffer *
1395 dri3_get_buffer(__DRIdrawable *driDrawable,
1396 unsigned int format,
1397 enum loader_dri3_buffer_type buffer_type,
1398 struct loader_dri3_drawable *draw)
1399 {
1400 struct loader_dri3_buffer *buffer;
1401 int buf_id;
1402
1403 if (buffer_type == loader_dri3_buffer_back) {
1404 draw->back_format = format;
1405
1406 buf_id = dri3_find_back(draw);
1407
1408 if (buf_id < 0)
1409 return NULL;
1410 } else {
1411 buf_id = LOADER_DRI3_FRONT_ID;
1412 }
1413
1414 buffer = draw->buffers[buf_id];
1415
1416 /* Allocate a new buffer if there isn't an old one, or if that
1417 * old one is the wrong size
1418 */
1419 if (!buffer || buffer->width != draw->width ||
1420 buffer->height != draw->height) {
1421 struct loader_dri3_buffer *new_buffer;
1422
1423 /* Allocate the new buffers
1424 */
1425 new_buffer = dri3_alloc_render_buffer(draw,
1426 format,
1427 draw->width,
1428 draw->height,
1429 draw->depth);
1430 if (!new_buffer)
1431 return NULL;
1432
1433 /* When resizing, copy the contents of the old buffer, waiting for that
1434 * copy to complete using our fences before proceeding
1435 */
1436 if ((buffer_type == loader_dri3_buffer_back ||
1437 (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
1438 && buffer) {
1439
1440 /* Fill the new buffer with data from an old buffer */
1441 dri3_fence_await(draw->conn, draw, buffer);
1442 if (!loader_dri3_blit_image(draw,
1443 new_buffer->image,
1444 buffer->image,
1445 0, 0, draw->width, draw->height,
1446 0, 0, 0) &&
1447 !buffer->linear_buffer) {
1448 dri3_fence_reset(draw->conn, new_buffer);
1449 dri3_copy_area(draw->conn,
1450 buffer->pixmap,
1451 new_buffer->pixmap,
1452 dri3_drawable_gc(draw),
1453 0, 0, 0, 0,
1454 draw->width, draw->height);
1455 dri3_fence_trigger(draw->conn, new_buffer);
1456 }
1457 dri3_free_render_buffer(draw, buffer);
1458 } else if (buffer_type == loader_dri3_buffer_front) {
1459 /* Fill the new fake front with data from a real front */
1460 loader_dri3_swapbuffer_barrier(draw);
1461 dri3_fence_reset(draw->conn, new_buffer);
1462 dri3_copy_area(draw->conn,
1463 draw->drawable,
1464 new_buffer->pixmap,
1465 dri3_drawable_gc(draw),
1466 0, 0, 0, 0,
1467 draw->width, draw->height);
1468 dri3_fence_trigger(draw->conn, new_buffer);
1469
1470 if (new_buffer->linear_buffer) {
1471 dri3_fence_await(draw->conn, draw, new_buffer);
1472 (void) loader_dri3_blit_image(draw,
1473 new_buffer->image,
1474 new_buffer->linear_buffer,
1475 0, 0, draw->width, draw->height,
1476 0, 0, 0);
1477 }
1478 }
1479 buffer = new_buffer;
1480 draw->buffers[buf_id] = buffer;
1481 }
1482 dri3_fence_await(draw->conn, draw, buffer);
1483
1484 /*
1485 * Do we need to preserve the content of a previous buffer?
1486 *
1487 * Note that this blit is needed only to avoid a wait for a buffer that
1488 * is currently in the flip chain or being scanned out from. That's really
1489 * a tradeoff. If we're ok with the wait we can reduce the number of back
1490 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1491 * but in the latter case we must disallow page-flipping.
1492 */
1493 if (buffer_type == loader_dri3_buffer_back &&
1494 draw->cur_blit_source != -1 &&
1495 draw->buffers[draw->cur_blit_source] &&
1496 buffer != draw->buffers[draw->cur_blit_source]) {
1497
1498 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1499
1500 /* Avoid flushing here. That will probably benefit tiling hardware. */
1501 (void) loader_dri3_blit_image(draw,
1502 buffer->image,
1503 source->image,
1504 0, 0, draw->width, draw->height,
1505 0, 0, 0);
1506 buffer->last_swap = source->last_swap;
1507 draw->cur_blit_source = -1;
1508 }
1509 /* Return the requested buffer */
1510 return buffer;
1511 }
1512
1513 /** dri3_free_buffers
1514 *
1515 * Free the front buffer or all of the back buffers. Used
1516 * when the application changes which buffers it needs
1517 */
1518 static void
1519 dri3_free_buffers(__DRIdrawable *driDrawable,
1520 enum loader_dri3_buffer_type buffer_type,
1521 struct loader_dri3_drawable *draw)
1522 {
1523 struct loader_dri3_buffer *buffer;
1524 int first_id;
1525 int n_id;
1526 int buf_id;
1527
1528 switch (buffer_type) {
1529 case loader_dri3_buffer_back:
1530 first_id = LOADER_DRI3_BACK_ID(0);
1531 n_id = LOADER_DRI3_MAX_BACK;
1532 draw->cur_blit_source = -1;
1533 break;
1534 case loader_dri3_buffer_front:
1535 first_id = LOADER_DRI3_FRONT_ID;
1536 /* Don't free a fake front holding new backbuffer content. */
1537 n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
1538 }
1539
1540 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1541 buffer = draw->buffers[buf_id];
1542 if (buffer) {
1543 dri3_free_render_buffer(draw, buffer);
1544 draw->buffers[buf_id] = NULL;
1545 }
1546 }
1547 }
1548
1549 /** loader_dri3_get_buffers
1550 *
1551 * The published buffer allocation API.
1552 * Returns all of the necessary buffers, allocating
1553 * as needed.
1554 */
1555 int
1556 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1557 unsigned int format,
1558 uint32_t *stamp,
1559 void *loaderPrivate,
1560 uint32_t buffer_mask,
1561 struct __DRIimageList *buffers)
1562 {
1563 struct loader_dri3_drawable *draw = loaderPrivate;
1564 struct loader_dri3_buffer *front, *back;
1565
1566 buffers->image_mask = 0;
1567 buffers->front = NULL;
1568 buffers->back = NULL;
1569
1570 front = NULL;
1571 back = NULL;
1572
1573 if (!dri3_update_drawable(driDrawable, draw))
1574 return false;
1575
1576 /* pixmaps always have front buffers.
1577 * Exchange swaps also mandate fake front buffers.
1578 */
1579 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1580 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1581
1582 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1583 /* All pixmaps are owned by the server gpu.
1584 * When we use a different gpu, we can't use the pixmap
1585 * as a buffer since it is potentially tiled in a way
1586 * our device can't understand. In this case, use
1587 * a fake front buffer. Hopefully the pixmap
1588 * content will get synced with the fake front
1589 * buffer.
1590 */
1591 if (draw->is_pixmap && !draw->is_different_gpu)
1592 front = dri3_get_pixmap_buffer(driDrawable,
1593 format,
1594 loader_dri3_buffer_front,
1595 draw);
1596 else
1597 front = dri3_get_buffer(driDrawable,
1598 format,
1599 loader_dri3_buffer_front,
1600 draw);
1601
1602 if (!front)
1603 return false;
1604 } else {
1605 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1606 draw->have_fake_front = 0;
1607 }
1608
1609 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1610 back = dri3_get_buffer(driDrawable,
1611 format,
1612 loader_dri3_buffer_back,
1613 draw);
1614 if (!back)
1615 return false;
1616 draw->have_back = 1;
1617 } else {
1618 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1619 draw->have_back = 0;
1620 }
1621
1622 if (front) {
1623 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1624 buffers->front = front->image;
1625 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1626 }
1627
1628 if (back) {
1629 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1630 buffers->back = back->image;
1631 }
1632
1633 draw->stamp = stamp;
1634
1635 return true;
1636 }
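/*
 * Example (sketch of how a DRI3-based GLX/EGL caller typically wires this
 * up; the flushFrontBuffer hook shown here is the caller's own function):
 *
 *    static const __DRIimageLoaderExtension imageLoaderExtension = {
 *       .base             = { __DRI_IMAGE_LOADER, 1 },
 *       .getBuffers       = loader_dri3_get_buffers,
 *       .flushFrontBuffer = dri3_flush_front_buffer,
 *    };
 */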
1637
1638 /** loader_dri3_update_drawable_geometry
1639 *
1640 * Get the current drawable geometry.
1641 */
1642 void
1643 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1644 {
1645 xcb_get_geometry_cookie_t geom_cookie;
1646 xcb_get_geometry_reply_t *geom_reply;
1647
1648 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1649
1650 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1651
1652 if (geom_reply) {
1653 draw->width = geom_reply->width;
1654 draw->height = geom_reply->height;
1655 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1656 draw->ext->flush->invalidate(draw->dri_drawable);
1657
1658 free(geom_reply);
1659 }
1660 }
1661
1662
1663 /**
1664 * Make sure the server has flushed all pending swap buffers to hardware
1665 * for this drawable. Ideally we'd want to send an X protocol request to
1666 * have the server block our connection until the swaps are complete. That
1667 * would avoid the potential round-trip here.
1668 */
1669 void
1670 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1671 {
1672 int64_t ust, msc, sbc;
1673
1674 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1675 }
1676
1677 /**
1678 * Perform any cleanup associated with a close screen operation.
1679 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1680 *
1681 * This function destroys the screen's cached swap context if any.
1682 */
1683 void
1684 loader_dri3_close_screen(__DRIscreen *dri_screen)
1685 {
1686 mtx_lock(&blit_context.mtx);
1687 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1688 blit_context.core->destroyContext(blit_context.ctx);
1689 blit_context.ctx = NULL;
1690 }
1691 mtx_unlock(&blit_context.mtx);
1692 }
1693
1694 /**
1695 * Find a back buffer slot, potentially allocating a back buffer
1696 *
1697 * \param draw[in,out] Pointer to the drawable for which to find a back buffer.
1698 * \return Pointer to a back buffer, or NULL if allocation failed or was
1699 * not required.
1700 *
1701 * Find the back buffer slot to use and, if it hasn't been allocated yet,
1702 * try to allocate and initialize it.
1703 */
1705 static struct loader_dri3_buffer *
1706 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1707 {
1708 struct loader_dri3_buffer *back;
1709 int id;
1710
1711 id = dri3_find_back(draw);
1712 if (id < 0)
1713 return NULL;
1714
1715 back = draw->buffers[id];
1716 /* Allocate a new back if we haven't got one */
1717 if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
1718 dri3_update_drawable(draw->dri_drawable, draw))
1719 back = dri3_alloc_render_buffer(draw, draw->back_format,
1720 draw->width, draw->height, draw->depth);
1721
1722 if (!back)
1723 return NULL;
1724
1725 draw->buffers[id] = back;
1726
1727 /* If necessary, prefill the back with data according to swap_method mode. */
1728 if (draw->cur_blit_source != -1 &&
1729 draw->buffers[draw->cur_blit_source] &&
1730 back != draw->buffers[draw->cur_blit_source]) {
1731 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1732
1733 dri3_fence_await(draw->conn, draw, source);
1734 dri3_fence_await(draw->conn, draw, back);
1735 (void) loader_dri3_blit_image(draw,
1736 back->image,
1737 source->image,
1738 0, 0, draw->width, draw->height,
1739 0, 0, 0);
1740 back->last_swap = source->last_swap;
1741 draw->cur_blit_source = -1;
1742 }
1743
1744 return back;
1745 }