loader: Add support for platform and host1x busses
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include "loader_dri3_helper.h"
36 #include "util/macros.h"
37
38 /* From xmlpool/options.h, user exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
43
44 /**
45 * A cached blit context.
46 */
47 struct loader_dri3_blit_context {
48 mtx_t mtx;
49 __DRIcontext *ctx;
50 __DRIscreen *cur_screen;
51 const __DRIcoreExtension *core;
52 };
53
54 /* For simplicity we maintain the cache only for a single screen at a time */
55 static struct loader_dri3_blit_context blit_context = {
56 _MTX_INITIALIZER_NP, NULL
57 };
58
59 static void
60 dri3_flush_present_events(struct loader_dri3_drawable *draw);
61
62 static struct loader_dri3_buffer *
63 dri3_find_back_alloc(struct loader_dri3_drawable *draw);
64
65 /**
66 * Do we have blit functionality in the image blit extension?
67 *
68 * \param draw[in] The drawable intended to blit from / to.
69 * \return true if we have blit functionality. false otherwise.
70 */
71 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
72 {
73 return draw->ext->image->base.version >= 9 &&
74 draw->ext->image->blitImage != NULL;
75 }
76
77 /**
78 * Get and lock (for use with the current thread) a dri context associated
79 * with the drawable's dri screen. The context is intended to be used with
80 * the dri image extension's blitImage method.
81 *
82 * \param draw[in] Pointer to the drawable whose dri screen we want a
83 * dri context for.
84 * \return A dri context or NULL if context creation failed.
85 *
86 * When the caller is done with the context (even if the context returned was
87 * NULL), the caller must call loader_dri3_blit_context_put.
88 */
89 static __DRIcontext *
90 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
91 {
92 mtx_lock(&blit_context.mtx);
93
94 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
95 blit_context.core->destroyContext(blit_context.ctx);
96 blit_context.ctx = NULL;
97 }
98
99 if (!blit_context.ctx) {
100 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
101 NULL, NULL, NULL);
102 blit_context.cur_screen = draw->dri_screen;
103 blit_context.core = draw->ext->core;
104 }
105
106 return blit_context.ctx;
107 }
108
109 /**
110 * Release (for use with other threads) a dri context previously obtained using
111 * loader_dri3_blit_context_get.
112 */
113 static void
114 loader_dri3_blit_context_put(void)
115 {
116 mtx_unlock(&blit_context.mtx);
117 }
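
/* A minimal usage sketch of the get/put pair above (dst, src, w and h are
 * hypothetical):
 *
 *    __DRIcontext *ctx = loader_dri3_blit_context_get(draw);
 *
 *    if (ctx)
 *       draw->ext->image->blitImage(ctx, dst, src, 0, 0, w, h,
 *                                   0, 0, w, h, __BLIT_FLAG_FLUSH);
 *    loader_dri3_blit_context_put();
 *
 * loader_dri3_blit_context_put() must be called even when the get returned
 * NULL, since the blit-context mutex is held either way.
 */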
118
119 /**
120 * Blit (parts of) the contents of a DRI image to another dri image
121 *
122 * \param draw[in] The drawable which owns the images.
123 * \param dst[in] The destination image.
124 * \param src[in] The source image.
125 * \param dstx0[in] Start destination coordinate.
126 * \param dsty0[in] Start destination coordinate.
127 * \param width[in] Blit width.
128 * \param height[in] Blit height.
129 * \param srcx0[in] Start source coordinate.
130 * \param srcy0[in] Start source coordinate.
131 * \param flush_flag[in] Image blit flush flag.
132 * \return true iff successful.
133 */
134 static bool
135 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
136 __DRIimage *dst, __DRIimage *src,
137 int dstx0, int dsty0, int width, int height,
138 int srcx0, int srcy0, int flush_flag)
139 {
140 __DRIcontext *dri_context;
141 bool use_blit_context = false;
142
143 if (!loader_dri3_have_image_blit(draw))
144 return false;
145
146 dri_context = draw->vtable->get_dri_context(draw);
147
148 if (!dri_context || !draw->vtable->in_current_context(draw)) {
149 dri_context = loader_dri3_blit_context_get(draw);
150 use_blit_context = true;
151 flush_flag |= __BLIT_FLAG_FLUSH;
152 }
153
154 if (dri_context)
155 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
156 width, height, srcx0, srcy0,
157 width, height, flush_flag);
158
159 if (use_blit_context)
160 loader_dri3_blit_context_put();
161
162 return dri_context != NULL;
163 }
164
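/* Fence helpers. Each buffer carries an xshmfence (buffer->shm_fence) whose
 * fd is shared with the X server and bound to an XCB sync fence
 * (buffer->sync_fence) via xcb_dri3_fence_from_fd() when the buffer is
 * created. The usual lifecycle is: reset the fence before handing the buffer
 * to the server, have either the client (dri3_fence_set) or the server
 * (dri3_fence_trigger) signal it once the buffer is ready / idle again, and
 * block on it with dri3_fence_await() before reusing the buffer.
 */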
165 static inline void
166 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
167 {
168 xshmfence_reset(buffer->shm_fence);
169 }
170
171 static inline void
172 dri3_fence_set(struct loader_dri3_buffer *buffer)
173 {
174 xshmfence_trigger(buffer->shm_fence);
175 }
176
177 static inline void
178 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
179 {
180 xcb_sync_trigger_fence(c, buffer->sync_fence);
181 }
182
183 static inline void
184 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
185 struct loader_dri3_buffer *buffer)
186 {
187 xcb_flush(c);
188 xshmfence_await(buffer->shm_fence);
189 if (draw) {
190 mtx_lock(&draw->mtx);
191 dri3_flush_present_events(draw);
192 mtx_unlock(&draw->mtx);
193 }
194 }
195
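/* When the server is flipping, the buffer being scanned out stays busy until
 * the next flip completes, so a third back buffer is used to avoid stalling;
 * two back buffers are enough when presents are performed with copies.
 */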
196 static void
197 dri3_update_num_back(struct loader_dri3_drawable *draw)
198 {
199 if (draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP)
200 draw->num_back = 3;
201 else
202 draw->num_back = 2;
203 }
204
205 void
206 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
207 {
208 draw->swap_interval = interval;
209 }
210
211 /** dri3_free_render_buffer
212 *
213 * Free everything associated with one render buffer including pixmap, fence
214 * stuff and the driver image
215 */
216 static void
217 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
218 struct loader_dri3_buffer *buffer)
219 {
220 if (buffer->own_pixmap)
221 xcb_free_pixmap(draw->conn, buffer->pixmap);
222 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
223 xshmfence_unmap_shm(buffer->shm_fence);
224 draw->ext->image->destroyImage(buffer->image);
225 if (buffer->linear_buffer)
226 draw->ext->image->destroyImage(buffer->linear_buffer);
227 free(buffer);
228 }
229
230 void
231 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
232 {
233 int i;
234
235 draw->ext->core->destroyDrawable(draw->dri_drawable);
236
237 for (i = 0; i < ARRAY_SIZE(draw->buffers); i++) {
238 if (draw->buffers[i])
239 dri3_free_render_buffer(draw, draw->buffers[i]);
240 }
241
242 if (draw->special_event) {
243 xcb_void_cookie_t cookie =
244 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
245 XCB_PRESENT_EVENT_MASK_NO_EVENT);
246
247 xcb_discard_reply(draw->conn, cookie.sequence);
248 xcb_unregister_for_special_event(draw->conn, draw->special_event);
249 }
250
251 cnd_destroy(&draw->event_cnd);
252 mtx_destroy(&draw->mtx);
253 }
254
255 int
256 loader_dri3_drawable_init(xcb_connection_t *conn,
257 xcb_drawable_t drawable,
258 __DRIscreen *dri_screen,
259 bool is_different_gpu,
260 const __DRIconfig *dri_config,
261 struct loader_dri3_extensions *ext,
262 const struct loader_dri3_vtable *vtable,
263 struct loader_dri3_drawable *draw)
264 {
265 xcb_get_geometry_cookie_t cookie;
266 xcb_get_geometry_reply_t *reply;
267 xcb_generic_error_t *error;
268 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
269 int swap_interval;
270
271 draw->conn = conn;
272 draw->ext = ext;
273 draw->vtable = vtable;
274 draw->drawable = drawable;
275 draw->dri_screen = dri_screen;
276 draw->is_different_gpu = is_different_gpu;
277
278 draw->have_back = 0;
279 draw->have_fake_front = 0;
280 draw->first_init = true;
281
282 draw->cur_blit_source = -1;
283 draw->back_format = __DRI_IMAGE_FORMAT_NONE;
284 mtx_init(&draw->mtx, mtx_plain);
285 cnd_init(&draw->event_cnd);
286
287 if (draw->ext->config)
288 draw->ext->config->configQueryi(draw->dri_screen,
289 "vblank_mode", &vblank_mode);
290
291 switch (vblank_mode) {
292 case DRI_CONF_VBLANK_NEVER:
293 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
294 swap_interval = 0;
295 break;
296 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
297 case DRI_CONF_VBLANK_ALWAYS_SYNC:
298 default:
299 swap_interval = 1;
300 break;
301 }
302 draw->swap_interval = swap_interval;
303
304 dri3_update_num_back(draw);
305
306 /* Create a new drawable */
307 draw->dri_drawable =
308 draw->ext->image_driver->createNewDrawable(dri_screen,
309 dri_config,
310 draw);
311
312 if (!draw->dri_drawable)
313 return 1;
314
315 cookie = xcb_get_geometry(draw->conn, draw->drawable);
316 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
317 if (reply == NULL || error != NULL) {
318 draw->ext->core->destroyDrawable(draw->dri_drawable);
319 return 1;
320 }
321
322 draw->width = reply->width;
323 draw->height = reply->height;
324 draw->depth = reply->depth;
325 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
326 free(reply);
327
328 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
329 if (draw->ext->core->base.version >= 2) {
330 (void )draw->ext->core->getConfigAttrib(dri_config,
331 __DRI_ATTRIB_SWAP_METHOD,
332 &draw->swap_method);
333 }
334
335 /*
336 * Make sure server has the same swap interval we do for the new
337 * drawable.
338 */
339 loader_dri3_set_swap_interval(draw, swap_interval);
340
341 return 0;
342 }
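
/* A sketch of how a caller might set up a drawable (screen, my_vtable and
 * priv are hypothetical); the function returns 0 on success and non-zero on
 * failure:
 *
 *    if (loader_dri3_drawable_init(conn, window, dri_screen,
 *                                  false, dri_config,
 *                                  &screen->loader_ext, &my_vtable,
 *                                  &priv->loader_drawable) != 0)
 *       return false;
 */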
343
344 /*
345 * Process one Present event
346 */
347 static void
348 dri3_handle_present_event(struct loader_dri3_drawable *draw,
349 xcb_present_generic_event_t *ge)
350 {
351 switch (ge->evtype) {
352 case XCB_PRESENT_CONFIGURE_NOTIFY: {
353 xcb_present_configure_notify_event_t *ce = (void *) ge;
354
355 draw->width = ce->width;
356 draw->height = ce->height;
357 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
358 draw->ext->flush->invalidate(draw->dri_drawable);
359 break;
360 }
361 case XCB_PRESENT_COMPLETE_NOTIFY: {
362 xcb_present_complete_notify_event_t *ce = (void *) ge;
363
364 /* Compute the processed SBC number from the received 32-bit serial number
365        * merged with the upper 32 bits of the sent 64-bit serial number while
366 * checking for wrap.
367 */
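         /* Worked example (hypothetical values): if send_sbc is 0x100000005
          * and the event's serial is 0x00000004, the merged value 0x100000004
          * does not exceed send_sbc and is used as-is. If the serial is
          * 0xfffffffe instead, the merged value 0x1fffffffe exceeds send_sbc,
          * so 0x100000000 is subtracted, giving the pre-wrap SBC 0xfffffffe.
          */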
368 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
369 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
370 if (draw->recv_sbc > draw->send_sbc)
371 draw->recv_sbc -= 0x100000000;
372
373 draw->last_present_mode = ce->mode;
374
375 if (draw->vtable->show_fps)
376 draw->vtable->show_fps(draw, ce->ust);
377
378 draw->ust = ce->ust;
379 draw->msc = ce->msc;
380 } else if (ce->serial == draw->eid) {
381 draw->notify_ust = ce->ust;
382 draw->notify_msc = ce->msc;
383 }
384 break;
385 }
386 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
387 xcb_present_idle_notify_event_t *ie = (void *) ge;
388 int b;
389
390 for (b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
391 struct loader_dri3_buffer *buf = draw->buffers[b];
392
393 if (buf && buf->pixmap == ie->pixmap)
394 buf->busy = 0;
395
396 if (buf && draw->num_back <= b && b < LOADER_DRI3_MAX_BACK &&
397 draw->cur_blit_source != b &&
398 !buf->busy) {
399 dri3_free_render_buffer(draw, buf);
400 draw->buffers[b] = NULL;
401 }
402 }
403 break;
404 }
405 }
406 free(ge);
407 }
408
409 static bool
410 dri3_wait_for_event_locked(struct loader_dri3_drawable *draw)
411 {
412 xcb_generic_event_t *ev;
413 xcb_present_generic_event_t *ge;
414
415 xcb_flush(draw->conn);
416
417 /* Only have one thread waiting for events at a time */
418 if (draw->has_event_waiter) {
419 cnd_wait(&draw->event_cnd, &draw->mtx);
420 /* Another thread has updated the protected info, so retest. */
421 return true;
422 } else {
423 draw->has_event_waiter = true;
424 /* Allow other threads access to the drawable while we're waiting. */
425 mtx_unlock(&draw->mtx);
426 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
427 mtx_lock(&draw->mtx);
428 draw->has_event_waiter = false;
429 cnd_broadcast(&draw->event_cnd);
430 }
431 if (!ev)
432 return false;
433 ge = (void *) ev;
434 dri3_handle_present_event(draw, ge);
435 return true;
436 }
437
438 /** loader_dri3_wait_for_msc
439 *
440 * Get the X server to send an event when the target msc/divisor/remainder is
441 * reached.
442 */
443 bool
444 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
445 int64_t target_msc,
446 int64_t divisor, int64_t remainder,
447 int64_t *ust, int64_t *msc, int64_t *sbc)
448 {
449 xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
450 draw->drawable,
451 draw->eid,
452 target_msc,
453 divisor,
454 remainder);
455 xcb_generic_event_t *ev;
456 unsigned full_sequence;
457
458 mtx_lock(&draw->mtx);
459 xcb_flush(draw->conn);
460
461 /* Wait for the event */
462 do {
463 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
464 if (!ev) {
465 mtx_unlock(&draw->mtx);
466 return false;
467 }
468
469 full_sequence = ev->full_sequence;
470 dri3_handle_present_event(draw, (void *) ev);
471 } while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
472
473 *ust = draw->notify_ust;
474 *msc = draw->notify_msc;
475 *sbc = draw->recv_sbc;
476 mtx_unlock(&draw->mtx);
477
478 return true;
479 }
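
/* For example, a GLX-style glXGetSyncValuesOML() implementation could fetch
 * the current UST/MSC/SBC triple by asking for a target MSC of zero (a
 * sketch; draw is assumed to be the drawable's loader_dri3_drawable):
 *
 *    int64_t ust, msc, sbc;
 *
 *    if (!loader_dri3_wait_for_msc(draw, 0, 0, 0, &ust, &msc, &sbc))
 *       return false;
 */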
480
481 /** loader_dri3_wait_for_sbc
482 *
483 * Wait for the completed swap buffer count to reach the specified
484 * target. Presumably the application knows that this will be reached with
485 * outstanding complete events, or we're going to be here awhile.
486 */
487 int
488 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
489 int64_t target_sbc, int64_t *ust,
490 int64_t *msc, int64_t *sbc)
491 {
492 /* From the GLX_OML_sync_control spec:
493 *
494 * "If <target_sbc> = 0, the function will block until all previous
495 * swaps requested with glXSwapBuffersMscOML for that window have
496 * completed."
497 */
498 mtx_lock(&draw->mtx);
499 if (!target_sbc)
500 target_sbc = draw->send_sbc;
501
502 while (draw->recv_sbc < target_sbc) {
503 if (!dri3_wait_for_event_locked(draw)) {
504 mtx_unlock(&draw->mtx);
505 return 0;
506 }
507 }
508
509 *ust = draw->ust;
510 *msc = draw->msc;
511 *sbc = draw->recv_sbc;
512 mtx_unlock(&draw->mtx);
513 return 1;
514 }
515
516 /** dri3_find_back
517 *
518 * Find an idle back buffer. If there isn't one, then
519 * wait for a present idle notify event from the X server
520 */
521 static int
522 dri3_find_back(struct loader_dri3_drawable *draw)
523 {
524 int b;
525 int num_to_consider;
526
527 mtx_lock(&draw->mtx);
528    /* Increase the likelihood of reusing the current buffer */
529 dri3_flush_present_events(draw);
530
531 /* Check whether we need to reuse the current back buffer as new back.
532 * In that case, wait until it's not busy anymore.
533 */
534 dri3_update_num_back(draw);
535 num_to_consider = draw->num_back;
536 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
537 num_to_consider = 1;
538 draw->cur_blit_source = -1;
539 }
540
541 for (;;) {
542 for (b = 0; b < num_to_consider; b++) {
543 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
544 struct loader_dri3_buffer *buffer = draw->buffers[id];
545
546 if (!buffer || !buffer->busy) {
547 draw->cur_back = id;
548 mtx_unlock(&draw->mtx);
549 return id;
550 }
551 }
552 if (!dri3_wait_for_event_locked(draw)) {
553 mtx_unlock(&draw->mtx);
554 return -1;
555 }
556 }
557 }
558
559 static xcb_gcontext_t
560 dri3_drawable_gc(struct loader_dri3_drawable *draw)
561 {
562 if (!draw->gc) {
563 uint32_t v = 0;
564 xcb_create_gc(draw->conn,
565 (draw->gc = xcb_generate_id(draw->conn)),
566 draw->drawable,
567 XCB_GC_GRAPHICS_EXPOSURES,
568 &v);
569 }
570 return draw->gc;
571 }
572
573
574 static struct loader_dri3_buffer *
575 dri3_back_buffer(struct loader_dri3_drawable *draw)
576 {
577 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
578 }
579
580 static struct loader_dri3_buffer *
581 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
582 {
583 return draw->buffers[LOADER_DRI3_FRONT_ID];
584 }
585
586 static void
587 dri3_copy_area(xcb_connection_t *c,
588 xcb_drawable_t src_drawable,
589 xcb_drawable_t dst_drawable,
590 xcb_gcontext_t gc,
591 int16_t src_x,
592 int16_t src_y,
593 int16_t dst_x,
594 int16_t dst_y,
595 uint16_t width,
596 uint16_t height)
597 {
598 xcb_void_cookie_t cookie;
599
600 cookie = xcb_copy_area_checked(c,
601 src_drawable,
602 dst_drawable,
603 gc,
604 src_x,
605 src_y,
606 dst_x,
607 dst_y,
608 width,
609 height);
610 xcb_discard_reply(c, cookie.sequence);
611 }
612
613 /**
614 * Asks the driver to flush any queued work necessary for serializing with the
615 * X command stream, and optionally the slightly more strict requirement of
616 * glFlush() equivalence (which would require flushing even if nothing had
617 * been drawn to a window system framebuffer, for example).
618 */
619 void
620 loader_dri3_flush(struct loader_dri3_drawable *draw,
621 unsigned flags,
622 enum __DRI2throttleReason throttle_reason)
623 {
624    /* The context may be NULL if no context is current; in that case there is nothing to flush. */
625 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
626
627 if (dri_context) {
628 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
629 flags, throttle_reason);
630 }
631 }
632
633 void
634 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
635 int x, int y,
636 int width, int height,
637 bool flush)
638 {
639 struct loader_dri3_buffer *back;
640 unsigned flags = __DRI2_FLUSH_DRAWABLE;
641
642 /* Check we have the right attachments */
643 if (!draw->have_back || draw->is_pixmap)
644 return;
645
646 if (flush)
647 flags |= __DRI2_FLUSH_CONTEXT;
648 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
649
650 back = dri3_find_back_alloc(draw);
651 if (!back)
652 return;
653
654 y = draw->height - y - height;
655
656 if (draw->is_different_gpu) {
657 /* Update the linear buffer part of the back buffer
658 * for the dri3_copy_area operation
659 */
660 (void) loader_dri3_blit_image(draw,
661 back->linear_buffer,
662 back->image,
663 0, 0, back->width, back->height,
664 0, 0, __BLIT_FLAG_FLUSH);
665 }
666
667 loader_dri3_swapbuffer_barrier(draw);
668 dri3_fence_reset(draw->conn, back);
669 dri3_copy_area(draw->conn,
670 back->pixmap,
671 draw->drawable,
672 dri3_drawable_gc(draw),
673 x, y, x, y, width, height);
674 dri3_fence_trigger(draw->conn, back);
675 /* Refresh the fake front (if present) after we just damaged the real
676 * front.
677 */
678 if (draw->have_fake_front &&
679 !loader_dri3_blit_image(draw,
680 dri3_fake_front_buffer(draw)->image,
681 back->image,
682 x, y, width, height,
683 x, y, __BLIT_FLAG_FLUSH) &&
684 !draw->is_different_gpu) {
685 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
686 dri3_copy_area(draw->conn,
687 back->pixmap,
688 dri3_fake_front_buffer(draw)->pixmap,
689 dri3_drawable_gc(draw),
690 x, y, x, y, width, height);
691 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
692 dri3_fence_await(draw->conn, NULL, dri3_fake_front_buffer(draw));
693 }
694 dri3_fence_await(draw->conn, draw, back);
695 }
696
697 void
698 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
699 xcb_drawable_t dest,
700 xcb_drawable_t src)
701 {
702 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
703
704 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
705 dri3_copy_area(draw->conn,
706 src, dest,
707 dri3_drawable_gc(draw),
708 0, 0, 0, 0, draw->width, draw->height);
709 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
710 dri3_fence_await(draw->conn, draw, dri3_fake_front_buffer(draw));
711 }
712
713 void
714 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
715 {
716 struct loader_dri3_buffer *front;
717
718 if (draw == NULL || !draw->have_fake_front)
719 return;
720
721 front = dri3_fake_front_buffer(draw);
722
723 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
724
725    /* In the draw->is_different_gpu case, the linear buffer has been updated,
726 * but not yet the tiled buffer.
727 * Copy back to the tiled buffer we use for rendering.
728 * Note that we don't need flushing.
729 */
730 if (draw->is_different_gpu)
731 (void) loader_dri3_blit_image(draw,
732 front->image,
733 front->linear_buffer,
734 0, 0, front->width, front->height,
735 0, 0, 0);
736 }
737
738 void
739 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
740 {
741 struct loader_dri3_buffer *front;
742
743 if (draw == NULL || !draw->have_fake_front)
744 return;
745
746 front = dri3_fake_front_buffer(draw);
747
748    /* In the draw->is_different_gpu case, we update the linear_buffer
749 * before updating the real front.
750 */
751 if (draw->is_different_gpu)
752 (void) loader_dri3_blit_image(draw,
753 front->linear_buffer,
754 front->image,
755 0, 0, front->width, front->height,
756 0, 0, __BLIT_FLAG_FLUSH);
757 loader_dri3_swapbuffer_barrier(draw);
758 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
759 }
760
761 /** dri3_flush_present_events
762 *
763 * Process any present events that have been received from the X server
764 */
765 static void
766 dri3_flush_present_events(struct loader_dri3_drawable *draw)
767 {
768 /* Check to see if any configuration changes have occurred
769 * since we were last invoked
770 */
771 if (draw->has_event_waiter)
772 return;
773
774 if (draw->special_event) {
775 xcb_generic_event_t *ev;
776
777 while ((ev = xcb_poll_for_special_event(draw->conn,
778 draw->special_event)) != NULL) {
779 xcb_present_generic_event_t *ge = (void *) ev;
780 dri3_handle_present_event(draw, ge);
781 }
782 }
783 }
784
785 /** loader_dri3_swap_buffers_msc
786 *
787 * Make the current back buffer visible using the present extension
788 */
789 int64_t
790 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
791 int64_t target_msc, int64_t divisor,
792 int64_t remainder, unsigned flush_flags,
793 bool force_copy)
794 {
795 struct loader_dri3_buffer *back;
796 int64_t ret = 0;
797 uint32_t options = XCB_PRESENT_OPTION_NONE;
798
799 draw->vtable->flush_drawable(draw, flush_flags);
800
801 back = dri3_find_back_alloc(draw);
802
803 mtx_lock(&draw->mtx);
804 if (draw->is_different_gpu && back) {
805 /* Update the linear buffer before presenting the pixmap */
806 (void) loader_dri3_blit_image(draw,
807 back->linear_buffer,
808 back->image,
809 0, 0, back->width, back->height,
810 0, 0, __BLIT_FLAG_FLUSH);
811 }
812
813 /* If we need to preload the new back buffer, remember the source.
814 * The force_copy parameter is used by EGL to attempt to preserve
815 * the back buffer across a call to this function.
816 */
817 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
818 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
819
820 /* Exchange the back and fake front. Even though the server knows about these
821 * buffers, it has no notion of back and fake front.
822 */
823 if (back && draw->have_fake_front) {
824 struct loader_dri3_buffer *tmp;
825
826 tmp = dri3_fake_front_buffer(draw);
827 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
828 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
829
830 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
831 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
832 }
833
834 dri3_flush_present_events(draw);
835
836 if (back && !draw->is_pixmap) {
837 dri3_fence_reset(draw->conn, back);
838
839 /* Compute when we want the frame shown by taking the last known
840 * successful MSC and adding in a swap interval for each outstanding swap
841 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
842 * semantic"
843 */
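      /* For example (hypothetical values): with the last known msc at 1000, a
       * swap interval of 1 and two swaps still outstanding after the increment
       * below (send_sbc - recv_sbc == 2), the frame is targeted at MSC 1002.
       */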
844 ++draw->send_sbc;
845 if (target_msc == 0 && divisor == 0 && remainder == 0)
846 target_msc = draw->msc + draw->swap_interval *
847 (draw->send_sbc - draw->recv_sbc);
848 else if (divisor == 0 && remainder > 0) {
849 /* From the GLX_OML_sync_control spec:
850 * "If <divisor> = 0, the swap will occur when MSC becomes
851 * greater than or equal to <target_msc>."
852 *
853 * Note that there's no mention of the remainder. The Present
854 * extension throws BadValue for remainder != 0 with divisor == 0, so
855 * just drop the passed in value.
856 */
857 remainder = 0;
858 }
859
860 /* From the GLX_EXT_swap_control spec
861 * and the EGL 1.4 spec (page 53):
862 *
863 * "If <interval> is set to a value of 0, buffer swaps are not
864 * synchronized to a video frame."
865 *
866 * Implementation note: It is possible to enable triple buffering
867 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
868 * the default.
869 */
870 if (draw->swap_interval == 0)
871 options |= XCB_PRESENT_OPTION_ASYNC;
872
873 /* If we need to populate the new back, but need to reuse the back
874 * buffer slot due to lack of local blit capabilities, make sure
875       * the server doesn't flip, which would deadlock us.
876 */
877 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
878 options |= XCB_PRESENT_OPTION_COPY;
879
880 back->busy = 1;
881 back->last_swap = draw->send_sbc;
882 xcb_present_pixmap(draw->conn,
883 draw->drawable,
884 back->pixmap,
885 (uint32_t) draw->send_sbc,
886 0, /* valid */
887 0, /* update */
888 0, /* x_off */
889 0, /* y_off */
890 None, /* target_crtc */
891 None,
892 back->sync_fence,
893 options,
894 target_msc,
895 divisor,
896 remainder, 0, NULL);
897 ret = (int64_t) draw->send_sbc;
898
899 /* Schedule a server-side back-preserving blit if necessary.
900 * This happens iff all conditions below are satisfied:
901 * a) We have a fake front,
902 * b) We need to preserve the back buffer,
903 * c) We don't have local blit capabilities.
904 */
905 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
906 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
907 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
908 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
909
910 dri3_fence_reset(draw->conn, new_back);
911 dri3_copy_area(draw->conn, src->pixmap,
912 new_back->pixmap,
913 dri3_drawable_gc(draw),
914 0, 0, 0, 0, draw->width, draw->height);
915 dri3_fence_trigger(draw->conn, new_back);
916 new_back->last_swap = src->last_swap;
917 }
918
919 xcb_flush(draw->conn);
920 if (draw->stamp)
921 ++(*draw->stamp);
922 }
923 mtx_unlock(&draw->mtx);
924
925 draw->ext->flush->invalidate(draw->dri_drawable);
926
927 return ret;
928 }
929
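/** loader_dri3_query_buffer_age
 *
 * Return the age of the current back buffer in swaps, as used by
 * GLX_EXT_buffer_age / EGL_EXT_buffer_age: 0 if the buffer is new or its
 * contents are undefined, otherwise send_sbc - last_swap + 1. For example, a
 * buffer last presented at SBC N and queried when send_sbc is N + 2 has
 * age 3.
 */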
930 int
931 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
932 {
933 struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
934 int ret;
935
936 mtx_lock(&draw->mtx);
937 ret = (!back || back->last_swap == 0) ? 0 :
938 draw->send_sbc - back->last_swap + 1;
939 mtx_unlock(&draw->mtx);
940
941 return ret;
942 }
943
944 /** loader_dri3_open
945 *
946 * Wrapper around xcb_dri3_open
947 */
948 int
949 loader_dri3_open(xcb_connection_t *conn,
950 xcb_window_t root,
951 uint32_t provider)
952 {
953 xcb_dri3_open_cookie_t cookie;
954 xcb_dri3_open_reply_t *reply;
955 int fd;
956
957 cookie = xcb_dri3_open(conn,
958 root,
959 provider);
960
961 reply = xcb_dri3_open_reply(conn, cookie, NULL);
962 if (!reply)
963 return -1;
964
965 if (reply->nfd != 1) {
966 free(reply);
967 return -1;
968 }
969
970 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
971 free(reply);
972 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
973
974 return fd;
975 }
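
/* Typical use (sketch): obtain a DRM device fd for the screen, passing 0
 * (None) as the provider to let the server pick the default one:
 *
 *    int fd = loader_dri3_open(conn, root_window, 0);
 *
 *    if (fd < 0)
 *       return false;
 */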
976
977 static uint32_t
978 dri3_cpp_for_format(uint32_t format) {
979 switch (format) {
980 case __DRI_IMAGE_FORMAT_R8:
981 return 1;
982 case __DRI_IMAGE_FORMAT_RGB565:
983 case __DRI_IMAGE_FORMAT_GR88:
984 return 2;
985 case __DRI_IMAGE_FORMAT_XRGB8888:
986 case __DRI_IMAGE_FORMAT_ARGB8888:
987 case __DRI_IMAGE_FORMAT_ABGR8888:
988 case __DRI_IMAGE_FORMAT_XBGR8888:
989 case __DRI_IMAGE_FORMAT_XRGB2101010:
990 case __DRI_IMAGE_FORMAT_ARGB2101010:
991 case __DRI_IMAGE_FORMAT_XBGR2101010:
992 case __DRI_IMAGE_FORMAT_ABGR2101010:
993 case __DRI_IMAGE_FORMAT_SARGB8:
994 return 4;
995 case __DRI_IMAGE_FORMAT_NONE:
996 default:
997 return 0;
998 }
999 }
1000
1001 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1002 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1003 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1004 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1005 */
1006 static int
1007 image_format_to_fourcc(int format)
1008 {
1009
1010 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1011 switch (format) {
1012 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1013 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1014 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1015 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1016 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1017 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1018 case __DRI_IMAGE_FORMAT_XRGB2101010: return __DRI_IMAGE_FOURCC_XRGB2101010;
1019 case __DRI_IMAGE_FORMAT_ARGB2101010: return __DRI_IMAGE_FOURCC_ARGB2101010;
1020 case __DRI_IMAGE_FORMAT_XBGR2101010: return __DRI_IMAGE_FOURCC_XBGR2101010;
1021 case __DRI_IMAGE_FORMAT_ABGR2101010: return __DRI_IMAGE_FOURCC_ABGR2101010;
1022 }
1023 return 0;
1024 }
1025
1026 /** dri3_alloc_render_buffer
1027 *
1028 * Use the driver createImage function to construct a __DRIimage, then
1029 * get a file descriptor for that and create an X pixmap from that
1030 *
1031 * Allocate an xshmfence for synchronization
1032 */
1033 static struct loader_dri3_buffer *
1034 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
1035 int width, int height, int depth)
1036 {
1037 struct loader_dri3_buffer *buffer;
1038 __DRIimage *pixmap_buffer;
1039 xcb_pixmap_t pixmap;
1040 xcb_sync_fence_t sync_fence;
1041 struct xshmfence *shm_fence;
1042 int buffer_fd, fence_fd;
1043 int stride;
1044
1045 /* Create an xshmfence object and
1046 * prepare to send that to the X server
1047 */
1048
1049 fence_fd = xshmfence_alloc_shm();
1050 if (fence_fd < 0)
1051 return NULL;
1052
1053 shm_fence = xshmfence_map_shm(fence_fd);
1054 if (shm_fence == NULL)
1055 goto no_shm_fence;
1056
1057 /* Allocate the image from the driver
1058 */
1059 buffer = calloc(1, sizeof *buffer);
1060 if (!buffer)
1061 goto no_buffer;
1062
1063 buffer->cpp = dri3_cpp_for_format(format);
1064 if (!buffer->cpp)
1065 goto no_image;
1066
1067 if (!draw->is_different_gpu) {
1068 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1069 width, height,
1070 format,
1071 __DRI_IMAGE_USE_SHARE |
1072 __DRI_IMAGE_USE_SCANOUT |
1073 __DRI_IMAGE_USE_BACKBUFFER,
1074 buffer);
1075 pixmap_buffer = buffer->image;
1076
1077 if (!buffer->image)
1078 goto no_image;
1079 } else {
1080 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1081 width, height,
1082 format,
1083 0,
1084 buffer);
1085
1086 if (!buffer->image)
1087 goto no_image;
1088
1089 buffer->linear_buffer =
1090 draw->ext->image->createImage(draw->dri_screen,
1091 width, height, format,
1092 __DRI_IMAGE_USE_SHARE |
1093 __DRI_IMAGE_USE_LINEAR |
1094 __DRI_IMAGE_USE_BACKBUFFER,
1095 buffer);
1096 pixmap_buffer = buffer->linear_buffer;
1097
1098 if (!buffer->linear_buffer)
1099 goto no_linear_buffer;
1100 }
1101
1102 /* X wants the stride, so ask the image for it
1103 */
1104 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
1105 &stride))
1106 goto no_buffer_attrib;
1107
1108 buffer->pitch = stride;
1109
1110 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
1111 &buffer_fd))
1112 goto no_buffer_attrib;
1113
1114 xcb_dri3_pixmap_from_buffer(draw->conn,
1115 (pixmap = xcb_generate_id(draw->conn)),
1116 draw->drawable,
1117 buffer->size,
1118 width, height, buffer->pitch,
1119 depth, buffer->cpp * 8,
1120 buffer_fd);
1121
1122 xcb_dri3_fence_from_fd(draw->conn,
1123 pixmap,
1124 (sync_fence = xcb_generate_id(draw->conn)),
1125 false,
1126 fence_fd);
1127
1128 buffer->pixmap = pixmap;
1129 buffer->own_pixmap = true;
1130 buffer->sync_fence = sync_fence;
1131 buffer->shm_fence = shm_fence;
1132 buffer->width = width;
1133 buffer->height = height;
1134
1135 /* Mark the buffer as idle
1136 */
1137 dri3_fence_set(buffer);
1138
1139 return buffer;
1140
1141 no_buffer_attrib:
1142 draw->ext->image->destroyImage(pixmap_buffer);
1143 no_linear_buffer:
1144 if (draw->is_different_gpu)
1145 draw->ext->image->destroyImage(buffer->image);
1146 no_image:
1147 free(buffer);
1148 no_buffer:
1149 xshmfence_unmap_shm(shm_fence);
1150 no_shm_fence:
1151 close(fence_fd);
1152 return NULL;
1153 }
1154
1155 /** dri3_update_drawable
1156 *
1157 * Called the first time we use the drawable and then
1158 * after we receive present configure notify events to
1159 * track the geometry of the drawable
1160 */
1161 static int
1162 dri3_update_drawable(__DRIdrawable *driDrawable,
1163 struct loader_dri3_drawable *draw)
1164 {
1165 mtx_lock(&draw->mtx);
1166 if (draw->first_init) {
1167 xcb_get_geometry_cookie_t geom_cookie;
1168 xcb_get_geometry_reply_t *geom_reply;
1169 xcb_void_cookie_t cookie;
1170 xcb_generic_error_t *error;
1171 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1172 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1173
1174 draw->first_init = false;
1175
1176 /* Try to select for input on the window.
1177 *
1178 * If the drawable is a window, this will get our events
1179 * delivered.
1180 *
1181 * Otherwise, we'll get a BadWindow error back from this request which
1182 * will let us know that the drawable is a pixmap instead.
1183 */
1184
1185 draw->eid = xcb_generate_id(draw->conn);
1186 cookie =
1187 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1188 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1189 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1190 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1191
1192 present_capabilities_cookie =
1193 xcb_present_query_capabilities(draw->conn, draw->drawable);
1194
1195 /* Create an XCB event queue to hold present events outside of the usual
1196 * application event queue
1197 */
1198 draw->special_event = xcb_register_for_special_xge(draw->conn,
1199 &xcb_present_id,
1200 draw->eid,
1201 draw->stamp);
1202 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1203
1204 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1205
1206 if (!geom_reply) {
1207 mtx_unlock(&draw->mtx);
1208 return false;
1209 }
1210
1211 draw->width = geom_reply->width;
1212 draw->height = geom_reply->height;
1213 draw->depth = geom_reply->depth;
1214 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1215
1216 free(geom_reply);
1217
1218 draw->is_pixmap = false;
1219
1220 /* Check to see if our select input call failed. If it failed with a
1221 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1222 * special event queue created above and mark the drawable as a pixmap
1223 */
1224
1225 error = xcb_request_check(draw->conn, cookie);
1226
1227 present_capabilities_reply =
1228 xcb_present_query_capabilities_reply(draw->conn,
1229 present_capabilities_cookie,
1230 NULL);
1231
1232 if (present_capabilities_reply) {
1233 draw->present_capabilities = present_capabilities_reply->capabilities;
1234 free(present_capabilities_reply);
1235 } else
1236 draw->present_capabilities = 0;
1237
1238 if (error) {
1239 if (error->error_code != BadWindow) {
1240 free(error);
1241 mtx_unlock(&draw->mtx);
1242 return false;
1243 }
1244 draw->is_pixmap = true;
1245 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1246 draw->special_event = NULL;
1247 }
1248 }
1249 dri3_flush_present_events(draw);
1250 mtx_unlock(&draw->mtx);
1251 return true;
1252 }
1253
1254 __DRIimage *
1255 loader_dri3_create_image(xcb_connection_t *c,
1256 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1257 unsigned int format,
1258 __DRIscreen *dri_screen,
1259 const __DRIimageExtension *image,
1260 void *loaderPrivate)
1261 {
1262 int *fds;
1263 __DRIimage *image_planar, *ret;
1264 int stride, offset;
1265
1266 /* Get an FD for the pixmap object
1267 */
1268 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1269
1270 stride = bp_reply->stride;
1271 offset = 0;
1272
1273 /* createImageFromFds creates a wrapper __DRIimage structure which
1274  * can deal with multiple planes for things like YUV images. So, once
1275 * we've gotten the planar wrapper, pull the single plane out of it and
1276 * discard the wrapper.
1277 */
1278 image_planar = image->createImageFromFds(dri_screen,
1279 bp_reply->width,
1280 bp_reply->height,
1281 image_format_to_fourcc(format),
1282 fds, 1,
1283 &stride, &offset, loaderPrivate);
1284 close(fds[0]);
1285 if (!image_planar)
1286 return NULL;
1287
1288 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1289
1290 if (!ret)
1291 ret = image_planar;
1292 else
1293 image->destroyImage(image_planar);
1294
1295 return ret;
1296 }
1297
1298 /** dri3_get_pixmap_buffer
1299 *
1300 * Get the DRM object for a pixmap from the X server and
1301 * wrap that with a __DRIimage structure using createImageFromFds
1302 */
1303 static struct loader_dri3_buffer *
1304 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1305 enum loader_dri3_buffer_type buffer_type,
1306 struct loader_dri3_drawable *draw)
1307 {
1308 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1309 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1310 xcb_drawable_t pixmap;
1311 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1312 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1313 xcb_sync_fence_t sync_fence;
1314 struct xshmfence *shm_fence;
1315 int fence_fd;
1316 __DRIscreen *cur_screen;
1317
1318 if (buffer)
1319 return buffer;
1320
1321 pixmap = draw->drawable;
1322
1323 buffer = calloc(1, sizeof *buffer);
1324 if (!buffer)
1325 goto no_buffer;
1326
1327 fence_fd = xshmfence_alloc_shm();
1328 if (fence_fd < 0)
1329 goto no_fence;
1330 shm_fence = xshmfence_map_shm(fence_fd);
1331 if (shm_fence == NULL) {
1332 close (fence_fd);
1333 goto no_fence;
1334 }
1335
1336 xcb_dri3_fence_from_fd(draw->conn,
1337 pixmap,
1338 (sync_fence = xcb_generate_id(draw->conn)),
1339 false,
1340 fence_fd);
1341
1342 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1343 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1344 if (!bp_reply)
1345 goto no_image;
1346
1347 /* Get the currently-bound screen or revert to using the drawable's screen if
1348 * no contexts are currently bound. The latter case is at least necessary for
1349 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1350 */
1351 cur_screen = draw->vtable->get_dri_screen();
1352 if (!cur_screen) {
1353 cur_screen = draw->dri_screen;
1354 }
1355
1356 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1357 cur_screen, draw->ext->image,
1358 buffer);
1359 if (!buffer->image)
1360 goto no_image;
1361
1362 buffer->pixmap = pixmap;
1363 buffer->own_pixmap = false;
1364 buffer->width = bp_reply->width;
1365 buffer->height = bp_reply->height;
1366 buffer->shm_fence = shm_fence;
1367 buffer->sync_fence = sync_fence;
1368
1369 draw->buffers[buf_id] = buffer;
1370
1371 free(bp_reply);
1372
1373 return buffer;
1374
1375 no_image:
1376 free(bp_reply);
1377 xcb_sync_destroy_fence(draw->conn, sync_fence);
1378 xshmfence_unmap_shm(shm_fence);
1379 no_fence:
1380 free(buffer);
1381 no_buffer:
1382 return NULL;
1383 }
1384
1385 /** dri3_get_buffer
1386 *
1387 * Find a front or back buffer, allocating new ones as necessary
1388 */
1389 static struct loader_dri3_buffer *
1390 dri3_get_buffer(__DRIdrawable *driDrawable,
1391 unsigned int format,
1392 enum loader_dri3_buffer_type buffer_type,
1393 struct loader_dri3_drawable *draw)
1394 {
1395 struct loader_dri3_buffer *buffer;
1396 int buf_id;
1397
1398 if (buffer_type == loader_dri3_buffer_back) {
1399 draw->back_format = format;
1400
1401 buf_id = dri3_find_back(draw);
1402
1403 if (buf_id < 0)
1404 return NULL;
1405 } else {
1406 buf_id = LOADER_DRI3_FRONT_ID;
1407 }
1408
1409 buffer = draw->buffers[buf_id];
1410
1411 /* Allocate a new buffer if there isn't an old one, or if that
1412 * old one is the wrong size
1413 */
1414 if (!buffer || buffer->width != draw->width ||
1415 buffer->height != draw->height) {
1416 struct loader_dri3_buffer *new_buffer;
1417
1418 /* Allocate the new buffers
1419 */
1420 new_buffer = dri3_alloc_render_buffer(draw,
1421 format,
1422 draw->width,
1423 draw->height,
1424 draw->depth);
1425 if (!new_buffer)
1426 return NULL;
1427
1428 /* When resizing, copy the contents of the old buffer, waiting for that
1429 * copy to complete using our fences before proceeding
1430 */
1431 if ((buffer_type == loader_dri3_buffer_back ||
1432 (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
1433 && buffer) {
1434
1435 /* Fill the new buffer with data from an old buffer */
1436 dri3_fence_await(draw->conn, draw, buffer);
1437 if (!loader_dri3_blit_image(draw,
1438 new_buffer->image,
1439 buffer->image,
1440 0, 0, draw->width, draw->height,
1441 0, 0, 0) &&
1442 !buffer->linear_buffer) {
1443 dri3_fence_reset(draw->conn, new_buffer);
1444 dri3_copy_area(draw->conn,
1445 buffer->pixmap,
1446 new_buffer->pixmap,
1447 dri3_drawable_gc(draw),
1448 0, 0, 0, 0,
1449 draw->width, draw->height);
1450 dri3_fence_trigger(draw->conn, new_buffer);
1451 }
1452 dri3_free_render_buffer(draw, buffer);
1453 } else if (buffer_type == loader_dri3_buffer_front) {
1454 /* Fill the new fake front with data from a real front */
1455 loader_dri3_swapbuffer_barrier(draw);
1456 dri3_fence_reset(draw->conn, new_buffer);
1457 dri3_copy_area(draw->conn,
1458 draw->drawable,
1459 new_buffer->pixmap,
1460 dri3_drawable_gc(draw),
1461 0, 0, 0, 0,
1462 draw->width, draw->height);
1463 dri3_fence_trigger(draw->conn, new_buffer);
1464
1465 if (new_buffer->linear_buffer) {
1466 dri3_fence_await(draw->conn, draw, new_buffer);
1467 (void) loader_dri3_blit_image(draw,
1468 new_buffer->image,
1469 new_buffer->linear_buffer,
1470 0, 0, draw->width, draw->height,
1471 0, 0, 0);
1472 }
1473 }
1474 buffer = new_buffer;
1475 draw->buffers[buf_id] = buffer;
1476 }
1477 dri3_fence_await(draw->conn, draw, buffer);
1478
1479 /*
1480 * Do we need to preserve the content of a previous buffer?
1481 *
1482 * Note that this blit is needed only to avoid a wait for a buffer that
1483 * is currently in the flip chain or being scanned out from. That's really
1484 * a tradeoff. If we're ok with the wait we can reduce the number of back
1485 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1486 * but in the latter case we must disallow page-flipping.
1487 */
1488 if (buffer_type == loader_dri3_buffer_back &&
1489 draw->cur_blit_source != -1 &&
1490 draw->buffers[draw->cur_blit_source] &&
1491 buffer != draw->buffers[draw->cur_blit_source]) {
1492
1493 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1494
1495       /* Avoid flushing here. This will probably help on tiling hardware. */
1496 (void) loader_dri3_blit_image(draw,
1497 buffer->image,
1498 source->image,
1499 0, 0, draw->width, draw->height,
1500 0, 0, 0);
1501 buffer->last_swap = source->last_swap;
1502 draw->cur_blit_source = -1;
1503 }
1504 /* Return the requested buffer */
1505 return buffer;
1506 }
1507
1508 /** dri3_free_buffers
1509 *
1510  * Free the front buffer or all of the back buffers. Used
1511 * when the application changes which buffers it needs
1512 */
1513 static void
1514 dri3_free_buffers(__DRIdrawable *driDrawable,
1515 enum loader_dri3_buffer_type buffer_type,
1516 struct loader_dri3_drawable *draw)
1517 {
1518 struct loader_dri3_buffer *buffer;
1519 int first_id;
1520 int n_id;
1521 int buf_id;
1522
1523 switch (buffer_type) {
1524 case loader_dri3_buffer_back:
1525 first_id = LOADER_DRI3_BACK_ID(0);
1526 n_id = LOADER_DRI3_MAX_BACK;
1527 draw->cur_blit_source = -1;
1528 break;
1529 case loader_dri3_buffer_front:
1530 first_id = LOADER_DRI3_FRONT_ID;
1531 /* Don't free a fake front holding new backbuffer content. */
1532 n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
1533 }
1534
1535 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1536 buffer = draw->buffers[buf_id];
1537 if (buffer) {
1538 dri3_free_render_buffer(draw, buffer);
1539 draw->buffers[buf_id] = NULL;
1540 }
1541 }
1542 }
1543
1544 /** loader_dri3_get_buffers
1545 *
1546 * The published buffer allocation API.
1547 * Returns all of the necessary buffers, allocating
1548 * as needed.
1549 */
1550 int
1551 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1552 unsigned int format,
1553 uint32_t *stamp,
1554 void *loaderPrivate,
1555 uint32_t buffer_mask,
1556 struct __DRIimageList *buffers)
1557 {
1558 struct loader_dri3_drawable *draw = loaderPrivate;
1559 struct loader_dri3_buffer *front, *back;
1560
1561 buffers->image_mask = 0;
1562 buffers->front = NULL;
1563 buffers->back = NULL;
1564
1565 front = NULL;
1566 back = NULL;
1567
1568 if (!dri3_update_drawable(driDrawable, draw))
1569 return false;
1570
1571 /* pixmaps always have front buffers.
1572 * Exchange swaps also mandate fake front buffers.
1573 */
1574 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1575 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1576
1577 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1578 /* All pixmaps are owned by the server gpu.
1579 * When we use a different gpu, we can't use the pixmap
1580        * as a buffer, since it is potentially tiled in a way
1581 * our device can't understand. In this case, use
1582 * a fake front buffer. Hopefully the pixmap
1583 * content will get synced with the fake front
1584 * buffer.
1585 */
1586 if (draw->is_pixmap && !draw->is_different_gpu)
1587 front = dri3_get_pixmap_buffer(driDrawable,
1588 format,
1589 loader_dri3_buffer_front,
1590 draw);
1591 else
1592 front = dri3_get_buffer(driDrawable,
1593 format,
1594 loader_dri3_buffer_front,
1595 draw);
1596
1597 if (!front)
1598 return false;
1599 } else {
1600 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1601 draw->have_fake_front = 0;
1602 }
1603
1604 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1605 back = dri3_get_buffer(driDrawable,
1606 format,
1607 loader_dri3_buffer_back,
1608 draw);
1609 if (!back)
1610 return false;
1611 draw->have_back = 1;
1612 } else {
1613 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1614 draw->have_back = 0;
1615 }
1616
1617 if (front) {
1618 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1619 buffers->front = front->image;
1620 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1621 }
1622
1623 if (back) {
1624 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1625 buffers->back = back->image;
1626 }
1627
1628 draw->stamp = stamp;
1629
1630 return true;
1631 }
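
/* This is meant to be plugged directly into a driver's image loader
 * extension; a sketch of how a caller might do that (the flushFrontBuffer
 * callback, my_flush_front_buffer, is the caller's own):
 *
 *    static const __DRIimageLoaderExtension image_loader_extension = {
 *       .base = { __DRI_IMAGE_LOADER, 1 },
 *
 *       .getBuffers       = loader_dri3_get_buffers,
 *       .flushFrontBuffer = my_flush_front_buffer,
 *    };
 */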
1632
1633 /** loader_dri3_update_drawable_geometry
1634 *
1635 * Get the current drawable geometry.
1636 */
1637 void
1638 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1639 {
1640 xcb_get_geometry_cookie_t geom_cookie;
1641 xcb_get_geometry_reply_t *geom_reply;
1642
1643 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1644
1645 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1646
1647 if (geom_reply) {
1648 draw->width = geom_reply->width;
1649 draw->height = geom_reply->height;
1650 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1651 draw->ext->flush->invalidate(draw->dri_drawable);
1652
1653 free(geom_reply);
1654 }
1655 }
1656
1657
1658 /**
1659 * Make sure the server has flushed all pending swap buffers to hardware
1660 * for this drawable. Ideally we'd want to send an X protocol request to
1661 * have the server block our connection until the swaps are complete. That
1662 * would avoid the potential round-trip here.
1663 */
1664 void
1665 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1666 {
1667 int64_t ust, msc, sbc;
1668
1669 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1670 }
1671
1672 /**
1673 * Perform any cleanup associated with a close screen operation.
1674 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1675 *
1676 * This function destroys the screen's cached swap context if any.
1677 */
1678 void
1679 loader_dri3_close_screen(__DRIscreen *dri_screen)
1680 {
1681 mtx_lock(&blit_context.mtx);
1682 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1683 blit_context.core->destroyContext(blit_context.ctx);
1684 blit_context.ctx = NULL;
1685 }
1686 mtx_unlock(&blit_context.mtx);
1687 }
1688
1689 /**
1690 * Find a backbuffer slot - potentially allocating a back buffer
1691 *
1692  * \param draw[in,out] Pointer to the drawable for which to find a back buffer.
1693 * \return Pointer to a new back buffer or NULL if allocation failed or was
1694 * not mandated.
1695 *
1696  * Find a (potentially new) back buffer; if it hasn't been allocated yet
1697  * and needs initializing, try to allocate and initialize it.
1698 */
1700 static struct loader_dri3_buffer *
1701 dri3_find_back_alloc(struct loader_dri3_drawable *draw)
1702 {
1703 struct loader_dri3_buffer *back;
1704 int id;
1705
1706 id = dri3_find_back(draw);
1707 if (id < 0)
1708 return NULL;
1709
1710 back = draw->buffers[id];
1711 /* Allocate a new back if we haven't got one */
1712 if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
1713 dri3_update_drawable(draw->dri_drawable, draw))
1714 back = dri3_alloc_render_buffer(draw, draw->back_format,
1715 draw->width, draw->height, draw->depth);
1716
1717 if (!back)
1718 return NULL;
1719
1720 draw->buffers[id] = back;
1721
1722 /* If necessary, prefill the back with data according to swap_method mode. */
1723 if (draw->cur_blit_source != -1 &&
1724 draw->buffers[draw->cur_blit_source] &&
1725 back != draw->buffers[draw->cur_blit_source]) {
1726 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1727
1728 dri3_fence_await(draw->conn, draw, source);
1729 dri3_fence_await(draw->conn, draw, back);
1730 (void) loader_dri3_blit_image(draw,
1731 back->image,
1732 source->image,
1733 0, 0, draw->width, draw->height,
1734 0, 0, 0);
1735 back->last_swap = source->last_swap;
1736 draw->cur_blit_source = -1;
1737 }
1738
1739 return back;
1740 }