loader_dri3: Support GLX_SWAP_EXCHANGE_OML
[mesa.git] / src / loader / loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include <c11/threads.h>
36 #include "loader_dri3_helper.h"
37
38 /* From xmlpool/options.h, user exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
43
44 /**
45 * A cached blit context.
46 */
47 struct loader_dri3_blit_context {
48 mtx_t mtx;
49 __DRIcontext *ctx;
50 __DRIscreen *cur_screen;
51 const __DRIcoreExtension *core;
52 };
53
54 /* For simplicity we maintain the cache only for a single screen at a time */
55 static struct loader_dri3_blit_context blit_context = {
56 _MTX_INITIALIZER_NP, NULL
57 };
58
59 static void
60 dri3_flush_present_events(struct loader_dri3_drawable *draw);
61
62 /**
63 * Do we have blit functionality in the image blit extension?
64 *
65 * \param draw[in] The drawable intended to blit from / to.
66 * \return true if we have blit functionality. false otherwise.
67 */
68 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
69 {
70 return draw->ext->image->base.version >= 9 &&
71 draw->ext->image->blitImage != NULL;
72 }
73
74 /**
75 * Get and lock (for use with the current thread) a dri context associated
76 * with the drawable's dri screen. The context is intended to be used with
77 * the dri image extension's blitImage method.
78 *
79 * \param draw[in] Pointer to the drawable whose dri screen we want a
80 * dri context for.
81 * \return A dri context or NULL if context creation failed.
82 *
83 * When the caller is done with the context (even if the context returned was
84 * NULL), the caller must call loader_dri3_blit_context_put.
85 */
86 static __DRIcontext *
87 loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
88 {
89 mtx_lock(&blit_context.mtx);
90
91 if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
92 blit_context.core->destroyContext(blit_context.ctx);
93 blit_context.ctx = NULL;
94 }
95
96 if (!blit_context.ctx) {
97 blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
98 NULL, NULL, NULL);
99 blit_context.cur_screen = draw->dri_screen;
100 blit_context.core = draw->ext->core;
101 }
102
103 return blit_context.ctx;
104 }
105
106 /**
107 * Release (for use with other threads) a dri context previously obtained using
108 * loader_dri3_blit_context_get.
109 */
110 static void
111 loader_dri3_blit_context_put(void)
112 {
113 mtx_unlock(&blit_context.mtx);
114 }
115
116 /**
117 * Blit (parts of) the contents of a DRI image to another DRI image.
118 *
119 * \param draw[in] The drawable which owns the images.
120 * \param dst[in] The destination image.
121 * \param src[in] The source image.
122 * \param dstx0[in] Start destination coordinate.
123 * \param dsty0[in] Start destination coordinate.
124 * \param width[in] Blit width.
125 * \param height[in] Blit height.
126 * \param srcx0[in] Start source coordinate.
127 * \param srcy0[in] Start source coordinate.
128 * \param flush_flag[in] Image blit flush flag.
129 * \return true iff successful.
130 */
131 static bool
132 loader_dri3_blit_image(struct loader_dri3_drawable *draw,
133 __DRIimage *dst, __DRIimage *src,
134 int dstx0, int dsty0, int width, int height,
135 int srcx0, int srcy0, int flush_flag)
136 {
137 __DRIcontext *dri_context;
138 bool use_blit_context = false;
139
140 if (!loader_dri3_have_image_blit(draw))
141 return false;
142
143 dri_context = draw->vtable->get_dri_context(draw);
144
145 if (!dri_context || !draw->vtable->in_current_context(draw)) {
146 dri_context = loader_dri3_blit_context_get(draw);
147 use_blit_context = true;
148 flush_flag |= __BLIT_FLAG_FLUSH;
149 }
150
151 if (dri_context)
152 draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
153 width, height, srcx0, srcy0,
154 width, height, flush_flag);
155
156 if (use_blit_context)
157 loader_dri3_blit_context_put();
158
159 return dri_context != NULL;
160 }
161
162 static inline void
163 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
164 {
165 xshmfence_reset(buffer->shm_fence);
166 }
167
168 static inline void
169 dri3_fence_set(struct loader_dri3_buffer *buffer)
170 {
171 xshmfence_trigger(buffer->shm_fence);
172 }
173
174 static inline void
175 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
176 {
177 xcb_sync_trigger_fence(c, buffer->sync_fence);
178 }
179
180 static inline void
181 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
182 {
183 xcb_flush(c);
184 xshmfence_await(buffer->shm_fence);
185 }
186
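/* A likely rationale for these counts: when the server is flipping, the
 * presented buffer stays busy until a later flip replaces it, so a third
 * back buffer helps avoid stalling; with plain copies the buffer idles
 * sooner and two are enough.
 */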
187 static void
188 dri3_update_num_back(struct loader_dri3_drawable *draw)
189 {
190 if (draw->flipping)
191 draw->num_back = 3;
192 else
193 draw->num_back = 2;
194 }
195
196 void
197 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
198 {
199 draw->swap_interval = interval;
200 dri3_update_num_back(draw);
201 }
202
203 /** dri3_free_render_buffer
204 *
205 * Free everything associated with one render buffer, including the pixmap,
206 * fence objects and the driver image
207 */
208 static void
209 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
210 struct loader_dri3_buffer *buffer)
211 {
212 if (buffer->own_pixmap)
213 xcb_free_pixmap(draw->conn, buffer->pixmap);
214 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
215 xshmfence_unmap_shm(buffer->shm_fence);
216 draw->ext->image->destroyImage(buffer->image);
217 if (buffer->linear_buffer)
218 draw->ext->image->destroyImage(buffer->linear_buffer);
219 free(buffer);
220 }
221
222 void
223 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
224 {
225 int i;
226
227 draw->ext->core->destroyDrawable(draw->dri_drawable);
228
229 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
230 if (draw->buffers[i])
231 dri3_free_render_buffer(draw, draw->buffers[i]);
232 }
233
234 if (draw->special_event) {
235 xcb_void_cookie_t cookie =
236 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
237 XCB_PRESENT_EVENT_MASK_NO_EVENT);
238
239 xcb_discard_reply(draw->conn, cookie.sequence);
240 xcb_unregister_for_special_event(draw->conn, draw->special_event);
241 }
242 }
243
244 int
245 loader_dri3_drawable_init(xcb_connection_t *conn,
246 xcb_drawable_t drawable,
247 __DRIscreen *dri_screen,
248 bool is_different_gpu,
249 const __DRIconfig *dri_config,
250 struct loader_dri3_extensions *ext,
251 const struct loader_dri3_vtable *vtable,
252 struct loader_dri3_drawable *draw)
253 {
254 xcb_get_geometry_cookie_t cookie;
255 xcb_get_geometry_reply_t *reply;
256 xcb_generic_error_t *error;
257 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
258 int swap_interval;
259
260 draw->conn = conn;
261 draw->ext = ext;
262 draw->vtable = vtable;
263 draw->drawable = drawable;
264 draw->dri_screen = dri_screen;
265 draw->is_different_gpu = is_different_gpu;
266
267 draw->have_back = 0;
268 draw->have_fake_front = 0;
269 draw->first_init = true;
270
271 draw->cur_blit_source = -1;
272
273 if (draw->ext->config)
274 draw->ext->config->configQueryi(draw->dri_screen,
275 "vblank_mode", &vblank_mode);
276
277 switch (vblank_mode) {
278 case DRI_CONF_VBLANK_NEVER:
279 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
280 swap_interval = 0;
281 break;
282 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
283 case DRI_CONF_VBLANK_ALWAYS_SYNC:
284 default:
285 swap_interval = 1;
286 break;
287 }
288 draw->swap_interval = swap_interval;
289
290 dri3_update_num_back(draw);
291
292 /* Create a new drawable */
293 draw->dri_drawable =
294 draw->ext->image_driver->createNewDrawable(dri_screen,
295 dri_config,
296 draw);
297
298 if (!draw->dri_drawable)
299 return 1;
300
301 cookie = xcb_get_geometry(draw->conn, draw->drawable);
302 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
303 if (reply == NULL || error != NULL) {
304 draw->ext->core->destroyDrawable(draw->dri_drawable);
305 return 1;
306 }
307
308 draw->width = reply->width;
309 draw->height = reply->height;
310 draw->depth = reply->depth;
311 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
312 free(reply);
313
314 draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
315 if (draw->ext->core->base.version >= 2) {
316 (void) draw->ext->core->getConfigAttrib(dri_config,
317 __DRI_ATTRIB_SWAP_METHOD,
318 &draw->swap_method);
319 }
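/* The swap method comes from the fbconfig (GLX_SWAP_METHOD_OML on GLX).
 * loader_dri3_get_buffers() forces a fake front for
 * __DRI_ATTRIB_SWAP_EXCHANGE drawables, and loader_dri3_swap_buffers_msc()
 * uses the value when deciding which buffer the reused back slot has to be
 * repopulated from after a swap (roughly: the old front contents for
 * exchange, the just-presented frame for copy).
 */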
320
321 /*
322 * Make sure server has the same swap interval we do for the new
323 * drawable.
324 */
325 loader_dri3_set_swap_interval(draw, swap_interval);
326
327 return 0;
328 }
329
330 /*
331 * Process one Present event
332 */
333 static void
334 dri3_handle_present_event(struct loader_dri3_drawable *draw,
335 xcb_present_generic_event_t *ge)
336 {
337 switch (ge->evtype) {
338 case XCB_PRESENT_CONFIGURE_NOTIFY: {
339 xcb_present_configure_notify_event_t *ce = (void *) ge;
340
341 draw->width = ce->width;
342 draw->height = ce->height;
343 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
344 break;
345 }
346 case XCB_PRESENT_COMPLETE_NOTIFY: {
347 xcb_present_complete_notify_event_t *ce = (void *) ge;
348
349 /* Compute the processed SBC number from the received 32-bit serial number
350 * merged with the upper 32-bits of the sent 64-bit serial number while
351 * checking for wrap.
352 */
353 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
354 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
355 if (draw->recv_sbc > draw->send_sbc)
356 draw->recv_sbc -= 0x100000000;
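/* Worked example: if send_sbc is 0x100000002 and the event carries
 * serial 0xffffffff, the merge yields 0x1ffffffff, which is larger than
 * send_sbc, so 2^32 is subtracted and recv_sbc becomes 0xffffffff,
 * i.e. the pre-wrap value that was actually sent.
 */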
357 switch (ce->mode) {
358 case XCB_PRESENT_COMPLETE_MODE_FLIP:
359 draw->flipping = true;
360 break;
361 case XCB_PRESENT_COMPLETE_MODE_COPY:
362 draw->flipping = false;
363 break;
364 }
365 dri3_update_num_back(draw);
366
367 if (draw->vtable->show_fps)
368 draw->vtable->show_fps(draw, ce->ust);
369
370 draw->ust = ce->ust;
371 draw->msc = ce->msc;
372 } else {
373 draw->recv_msc_serial = ce->serial;
374 draw->notify_ust = ce->ust;
375 draw->notify_msc = ce->msc;
376 }
377 break;
378 }
379 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
380 xcb_present_idle_notify_event_t *ie = (void *) ge;
381 int b;
382
383 for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
384 struct loader_dri3_buffer *buf = draw->buffers[b];
385
386 if (buf && buf->pixmap == ie->pixmap) {
387 buf->busy = 0;
388 if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
389 dri3_free_render_buffer(draw, buf);
390 draw->buffers[b] = NULL;
391 }
392 break;
393 }
394 }
395 break;
396 }
397 }
398 free(ge);
399 }
400
401 static bool
402 dri3_wait_for_event(struct loader_dri3_drawable *draw)
403 {
404 xcb_generic_event_t *ev;
405 xcb_present_generic_event_t *ge;
406
407 xcb_flush(draw->conn);
408 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
409 if (!ev)
410 return false;
411 ge = (void *) ev;
412 dri3_handle_present_event(draw, ge);
413 return true;
414 }
415
416 /** loader_dri3_wait_for_msc
417 *
418 * Get the X server to send an event when the target msc/divisor/remainder is
419 * reached.
420 */
421 bool
422 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
423 int64_t target_msc,
424 int64_t divisor, int64_t remainder,
425 int64_t *ust, int64_t *msc, int64_t *sbc)
426 {
427 uint32_t msc_serial;
428
429 msc_serial = ++draw->send_msc_serial;
430 xcb_present_notify_msc(draw->conn,
431 draw->drawable,
432 msc_serial,
433 target_msc,
434 divisor,
435 remainder);
436
437 xcb_flush(draw->conn);
438
439 /* Wait for the event */
440 if (draw->special_event) {
441 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
442 if (!dri3_wait_for_event(draw))
443 return false;
444 }
445 }
446
447 *ust = draw->notify_ust;
448 *msc = draw->notify_msc;
449 *sbc = draw->recv_sbc;
450
451 return true;
452 }
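/* Illustrative only (not part of this helper): a GLX binding such as
 * glXWaitForMscOML() can be layered on this function roughly as follows,
 * with dri3_drawable_from_glx() standing in for whatever drawable lookup
 * the caller uses:
 *
 *    struct loader_dri3_drawable *draw =
 *       dri3_drawable_from_glx(dpy, drawable);
 *    int64_t ust, msc, sbc;
 *
 *    if (!draw || !loader_dri3_wait_for_msc(draw, target_msc, divisor,
 *                                           remainder, &ust, &msc, &sbc))
 *       return False;
 */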
453
454 /** loader_dri3_wait_for_sbc
455 *
456 * Wait for the completed swap buffer count to reach the specified
457 * target. Presumably the application knows that this will be reached with
458 * outstanding complete events, or we're going to be here awhile.
459 */
460 int
461 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
462 int64_t target_sbc, int64_t *ust,
463 int64_t *msc, int64_t *sbc)
464 {
465 /* From the GLX_OML_sync_control spec:
466 *
467 * "If <target_sbc> = 0, the function will block until all previous
468 * swaps requested with glXSwapBuffersMscOML for that window have
469 * completed."
470 */
471 if (!target_sbc)
472 target_sbc = draw->send_sbc;
473
474 while (draw->recv_sbc < target_sbc) {
475 if (!dri3_wait_for_event(draw))
476 return 0;
477 }
478
479 *ust = draw->ust;
480 *msc = draw->msc;
481 *sbc = draw->recv_sbc;
482 return 1;
483 }
484
485 /** dri3_find_back
486 *
487 * Find an idle back buffer. If there isn't one, then
488 * wait for a present idle notify event from the X server
489 */
490 static int
491 dri3_find_back(struct loader_dri3_drawable *draw)
492 {
493 int b;
494 xcb_generic_event_t *ev;
495 xcb_present_generic_event_t *ge;
496 int num_to_consider = draw->num_back;
497
498 /* Increase the likelihood of reusing the current buffer */
499 dri3_flush_present_events(draw);
500
501 /* Check whether we need to reuse the current back buffer as new back.
502 * In that case, wait until it's not busy anymore.
503 */
504 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
505 num_to_consider = 1;
506 draw->cur_blit_source = -1;
507 }
508
509 for (;;) {
510 for (b = 0; b < num_to_consider; b++) {
511 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
512 struct loader_dri3_buffer *buffer = draw->buffers[id];
513
514 if (!buffer || !buffer->busy) {
515 draw->cur_back = id;
516 return id;
517 }
518 }
519 xcb_flush(draw->conn);
520 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
521 if (!ev)
522 return -1;
523 ge = (void *) ev;
524 dri3_handle_present_event(draw, ge);
525 }
526 }
527
528 static xcb_gcontext_t
529 dri3_drawable_gc(struct loader_dri3_drawable *draw)
530 {
531 if (!draw->gc) {
532 uint32_t v = 0;
533 xcb_create_gc(draw->conn,
534 (draw->gc = xcb_generate_id(draw->conn)),
535 draw->drawable,
536 XCB_GC_GRAPHICS_EXPOSURES,
537 &v);
538 }
539 return draw->gc;
540 }
541
542
543 static struct loader_dri3_buffer *
544 dri3_back_buffer(struct loader_dri3_drawable *draw)
545 {
546 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
547 }
548
549 static struct loader_dri3_buffer *
550 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
551 {
552 return draw->buffers[LOADER_DRI3_FRONT_ID];
553 }
554
555 static void
556 dri3_copy_area(xcb_connection_t *c,
557 xcb_drawable_t src_drawable,
558 xcb_drawable_t dst_drawable,
559 xcb_gcontext_t gc,
560 int16_t src_x,
561 int16_t src_y,
562 int16_t dst_x,
563 int16_t dst_y,
564 uint16_t width,
565 uint16_t height)
566 {
567 xcb_void_cookie_t cookie;
568
569 cookie = xcb_copy_area_checked(c,
570 src_drawable,
571 dst_drawable,
572 gc,
573 src_x,
574 src_y,
575 dst_x,
576 dst_y,
577 width,
578 height);
579 xcb_discard_reply(c, cookie.sequence);
580 }
581
582 /**
583 * Asks the driver to flush any queued work necessary for serializing with the
584 * X command stream, and optionally the slightly more strict requirement of
585 * glFlush() equivalence (which would require flushing even if nothing had
586 * been drawn to a window system framebuffer, for example).
587 */
588 void
589 loader_dri3_flush(struct loader_dri3_drawable *draw,
590 unsigned flags,
591 enum __DRI2throttleReason throttle_reason)
592 {
593 /* The context may be NULL (e.g. when no context is current); only flush if we have one. */
594 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
595
596 if (dri_context) {
597 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
598 flags, throttle_reason);
599 }
600 }
601
602 void
603 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
604 int x, int y,
605 int width, int height,
606 bool flush)
607 {
608 struct loader_dri3_buffer *back;
609 unsigned flags = __DRI2_FLUSH_DRAWABLE;
610
611 /* Check we have the right attachments */
612 if (!draw->have_back || draw->is_pixmap)
613 return;
614
615 if (flush)
616 flags |= __DRI2_FLUSH_CONTEXT;
617 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
618
619 back = dri3_back_buffer(draw);
620 y = draw->height - y - height;
621
622 if (draw->is_different_gpu) {
623 /* Update the linear buffer part of the back buffer
624 * for the dri3_copy_area operation
625 */
626 (void) loader_dri3_blit_image(draw,
627 back->linear_buffer,
628 back->image,
629 0, 0, back->width, back->height,
630 0, 0, __BLIT_FLAG_FLUSH);
631 /* We use blit_image to update our fake front.
632 */
633 if (draw->have_fake_front)
634 (void) loader_dri3_blit_image(draw,
635 dri3_fake_front_buffer(draw)->image,
636 back->image,
637 x, y, width, height,
638 x, y, __BLIT_FLAG_FLUSH);
639 }
640
641 loader_dri3_swapbuffer_barrier(draw);
642 dri3_fence_reset(draw->conn, back);
643 dri3_copy_area(draw->conn,
644 dri3_back_buffer(draw)->pixmap,
645 draw->drawable,
646 dri3_drawable_gc(draw),
647 x, y, x, y, width, height);
648 dri3_fence_trigger(draw->conn, back);
649 /* Refresh the fake front (if present) after we just damaged the real
650 * front.
651 */
652 if (draw->have_fake_front && !draw->is_different_gpu) {
653 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
654 dri3_copy_area(draw->conn,
655 dri3_back_buffer(draw)->pixmap,
656 dri3_fake_front_buffer(draw)->pixmap,
657 dri3_drawable_gc(draw),
658 x, y, x, y, width, height);
659 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
660 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
661 }
662 dri3_fence_await(draw->conn, back);
663 }
664
665 void
666 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
667 xcb_drawable_t dest,
668 xcb_drawable_t src)
669 {
670 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
671
672 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
673 dri3_copy_area(draw->conn,
674 src, dest,
675 dri3_drawable_gc(draw),
676 0, 0, 0, 0, draw->width, draw->height);
677 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
678 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
679 }
680
681 void
682 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
683 {
684 struct loader_dri3_buffer *front;
685
686 if (draw == NULL || !draw->have_fake_front)
687 return;
688
689 front = dri3_fake_front_buffer(draw);
690
691 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
692
693 /* In the draw->is_different_gpu case, the linear buffer has been updated,
694 * but not yet the tiled buffer.
695 * Copy back to the tiled buffer we use for rendering.
696 * Note that we don't need flushing.
697 */
698 if (draw->is_different_gpu)
699 (void) loader_dri3_blit_image(draw,
700 front->image,
701 front->linear_buffer,
702 0, 0, front->width, front->height,
703 0, 0, 0);
704 }
705
706 void
707 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
708 {
709 struct loader_dri3_buffer *front;
710
711 if (draw == NULL || !draw->have_fake_front)
712 return;
713
714 front = dri3_fake_front_buffer(draw);
715
716 /* In the draw->is_different_gpu case, we update the linear_buffer
717 * before updating the real front.
718 */
719 if (draw->is_different_gpu)
720 (void) loader_dri3_blit_image(draw,
721 front->linear_buffer,
722 front->image,
723 0, 0, front->width, front->height,
724 0, 0, __BLIT_FLAG_FLUSH);
725 loader_dri3_swapbuffer_barrier(draw);
726 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
727 }
728
729 /** dri3_flush_present_events
730 *
731 * Process any present events that have been received from the X server
732 */
733 static void
734 dri3_flush_present_events(struct loader_dri3_drawable *draw)
735 {
736 /* Check to see if any configuration changes have occurred
737 * since we were last invoked
738 */
739 if (draw->special_event) {
740 xcb_generic_event_t *ev;
741
742 while ((ev = xcb_poll_for_special_event(draw->conn,
743 draw->special_event)) != NULL) {
744 xcb_present_generic_event_t *ge = (void *) ev;
745 dri3_handle_present_event(draw, ge);
746 }
747 }
748 }
749
750 /** loader_dri3_swap_buffers_msc
751 *
752 * Make the current back buffer visible using the present extension
753 */
754 int64_t
755 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
756 int64_t target_msc, int64_t divisor,
757 int64_t remainder, unsigned flush_flags,
758 bool force_copy)
759 {
760 struct loader_dri3_buffer *back;
761 int64_t ret = 0;
762 uint32_t options = XCB_PRESENT_OPTION_NONE;
763
764 draw->vtable->flush_drawable(draw, flush_flags);
765
766 back = draw->buffers[dri3_find_back(draw)];
767 if (draw->is_different_gpu && back) {
768 /* Update the linear buffer before presenting the pixmap */
769 (void) loader_dri3_blit_image(draw,
770 back->linear_buffer,
771 back->image,
772 0, 0, back->width, back->height,
773 0, 0, __BLIT_FLAG_FLUSH);
774 }
775
776 /* If we need to preload the new back buffer, remember the source.
777 * The force_copy parameter is used by EGL to attempt to preserve
778 * the back buffer across a call to this function.
779 */
780 if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
781 draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);
782
783 /* Exchange the back and fake front. Even though the server knows about these
784 * buffers, it has no notion of back and fake front.
785 */
786 if (back && draw->have_fake_front) {
787 struct loader_dri3_buffer *tmp;
788
789 tmp = dri3_fake_front_buffer(draw);
790 draw->buffers[LOADER_DRI3_FRONT_ID] = back;
791 draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;
792
793 if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
794 draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
795 }
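/* Net effect of the bookkeeping above (roughly): after the exchange the
 * just-presented buffer sits in the fake-front slot, the old fake front
 * drops into the back slot, and cur_blit_source records which buffer the
 * next back buffer must be repopulated from (either locally in
 * dri3_get_buffer() or via the server-side copy below): the old front
 * contents for exchange-style swaps, or the just-presented frame for
 * __DRI_ATTRIB_SWAP_COPY / force_copy.
 */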
796
797 dri3_flush_present_events(draw);
798
799 if (back && !draw->is_pixmap) {
800 dri3_fence_reset(draw->conn, back);
801
802 /* Compute when we want the frame shown by taking the last known
803 * successful MSC and adding in a swap interval for each outstanding swap
804 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
805 * semantic"
806 */
807 ++draw->send_sbc;
808 if (target_msc == 0 && divisor == 0 && remainder == 0)
809 target_msc = draw->msc + draw->swap_interval *
810 (draw->send_sbc - draw->recv_sbc);
811 else if (divisor == 0 && remainder > 0) {
812 /* From the GLX_OML_sync_control spec:
813 * "If <divisor> = 0, the swap will occur when MSC becomes
814 * greater than or equal to <target_msc>."
815 *
816 * Note that there's no mention of the remainder. The Present
817 * extension throws BadValue for remainder != 0 with divisor == 0, so
818 * just drop the passed in value.
819 */
820 remainder = 0;
821 }
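/* For example, with draw->msc == 100, swap_interval == 1 and
 * send_sbc - recv_sbc == 2 (this swap plus one still outstanding), the
 * computation above yields target_msc == 102, i.e. one frame per
 * outstanding swap.
 */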
822
823 /* From the GLX_EXT_swap_control spec
824 * and the EGL 1.4 spec (page 53):
825 *
826 * "If <interval> is set to a value of 0, buffer swaps are not
827 * synchronized to a video frame."
828 *
829 * Implementation note: It is possible to enable triple buffering
830 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
831 * the default.
832 */
833 if (draw->swap_interval == 0)
834 options |= XCB_PRESENT_OPTION_ASYNC;
835
836 /* If we need to populate the new back but must reuse the back buffer
837 * slot due to lack of local blit capabilities, make sure the server
838 * copies rather than flips, so we don't deadlock waiting for it to idle.
839 */
840 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
841 options |= XCB_PRESENT_OPTION_COPY;
842
843 back->busy = 1;
844 back->last_swap = draw->send_sbc;
845 xcb_present_pixmap(draw->conn,
846 draw->drawable,
847 back->pixmap,
848 (uint32_t) draw->send_sbc,
849 0, /* valid */
850 0, /* update */
851 0, /* x_off */
852 0, /* y_off */
853 None, /* target_crtc */
854 None, /* wait_fence */
855 back->sync_fence, /* idle_fence */
856 options,
857 target_msc,
858 divisor,
859 remainder, 0, NULL);
860 ret = (int64_t) draw->send_sbc;
861
862 /* Schedule a server-side back-preserving blit if necessary.
863 * This happens iff all conditions below are satisfied:
864 * a) We have a fake front,
865 * b) We need to preserve the back buffer,
866 * c) We don't have local blit capabilities.
867 */
868 if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
869 draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
870 struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
871 struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];
872
873 dri3_fence_reset(draw->conn, new_back);
874 dri3_copy_area(draw->conn, src->pixmap,
875 new_back->pixmap,
876 dri3_drawable_gc(draw),
877 0, 0, 0, 0, draw->width, draw->height);
878 dri3_fence_trigger(draw->conn, new_back);
879 new_back->last_swap = src->last_swap;
880 }
881
882 xcb_flush(draw->conn);
883 if (draw->stamp)
884 ++(*draw->stamp);
885 }
886
887 draw->ext->flush->invalidate(draw->dri_drawable);
888
889 return ret;
890 }
891
892 int
893 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
894 {
895 int back_id = LOADER_DRI3_BACK_ID(dri3_find_back(draw));
896
897 if (back_id < 0 || !draw->buffers[back_id])
898 return 0;
899
900 if (draw->buffers[back_id]->last_swap != 0)
901 return draw->send_sbc - draw->buffers[back_id]->last_swap + 1;
902 else
903 return 0;
904 }
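/* Example: if the back buffer about to be used was last presented at
 * SBC 7 and draw->send_sbc is now 9, its reported age is 9 - 7 + 1 = 3,
 * i.e. it still holds the frame presented three swaps before the one
 * about to be rendered.
 */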
905
906 /** loader_dri3_open
907 *
908 * Wrapper around xcb_dri3_open
909 */
910 int
911 loader_dri3_open(xcb_connection_t *conn,
912 xcb_window_t root,
913 uint32_t provider)
914 {
915 xcb_dri3_open_cookie_t cookie;
916 xcb_dri3_open_reply_t *reply;
917 int fd;
918
919 cookie = xcb_dri3_open(conn,
920 root,
921 provider);
922
923 reply = xcb_dri3_open_reply(conn, cookie, NULL);
924 if (!reply)
925 return -1;
926
927 if (reply->nfd != 1) {
928 free(reply);
929 return -1;
930 }
931
932 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
933 free(reply);
934 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
935
936 return fd;
937 }
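/* Illustrative only (not part of this helper): a platform layer typically
 * opens the DRI3 device for a screen roughly like
 *
 *    int fd = loader_dri3_open(conn, screen_root, None);
 *
 * (screen_root standing in for the root window of the target screen) and,
 * if fd is non-negative, creates its __DRIscreen on that file descriptor;
 * otherwise it falls back to DRI2 or software rendering.
 */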
938
939 static uint32_t
940 dri3_cpp_for_format(uint32_t format) {
941 switch (format) {
942 case __DRI_IMAGE_FORMAT_R8:
943 return 1;
944 case __DRI_IMAGE_FORMAT_RGB565:
945 case __DRI_IMAGE_FORMAT_GR88:
946 return 2;
947 case __DRI_IMAGE_FORMAT_XRGB8888:
948 case __DRI_IMAGE_FORMAT_ARGB8888:
949 case __DRI_IMAGE_FORMAT_ABGR8888:
950 case __DRI_IMAGE_FORMAT_XBGR8888:
951 case __DRI_IMAGE_FORMAT_XRGB2101010:
952 case __DRI_IMAGE_FORMAT_ARGB2101010:
953 case __DRI_IMAGE_FORMAT_SARGB8:
954 return 4;
955 case __DRI_IMAGE_FORMAT_NONE:
956 default:
957 return 0;
958 }
959 }
960
961 /** dri3_alloc_render_buffer
962 *
963 * Use the driver createImage function to construct a __DRIimage, then
964 * get a file descriptor for that and create an X pixmap from that
965 *
966 * Allocate an xshmfence for synchronization
967 */
968 static struct loader_dri3_buffer *
969 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
970 int width, int height, int depth)
971 {
972 struct loader_dri3_buffer *buffer;
973 __DRIimage *pixmap_buffer;
974 xcb_pixmap_t pixmap;
975 xcb_sync_fence_t sync_fence;
976 struct xshmfence *shm_fence;
977 int buffer_fd, fence_fd;
978 int stride;
979
980 /* Create an xshmfence object and
981 * prepare to send that to the X server
982 */
983
984 fence_fd = xshmfence_alloc_shm();
985 if (fence_fd < 0)
986 return NULL;
987
988 shm_fence = xshmfence_map_shm(fence_fd);
989 if (shm_fence == NULL)
990 goto no_shm_fence;
991
992 /* Allocate the image from the driver
993 */
994 buffer = calloc(1, sizeof *buffer);
995 if (!buffer)
996 goto no_buffer;
997
998 buffer->cpp = dri3_cpp_for_format(format);
999 if (!buffer->cpp)
1000 goto no_image;
1001
1002 if (!draw->is_different_gpu) {
1003 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1004 width, height,
1005 format,
1006 __DRI_IMAGE_USE_SHARE |
1007 __DRI_IMAGE_USE_SCANOUT |
1008 __DRI_IMAGE_USE_BACKBUFFER,
1009 buffer);
1010 pixmap_buffer = buffer->image;
1011
1012 if (!buffer->image)
1013 goto no_image;
1014 } else {
1015 buffer->image = draw->ext->image->createImage(draw->dri_screen,
1016 width, height,
1017 format,
1018 0,
1019 buffer);
1020
1021 if (!buffer->image)
1022 goto no_image;
1023
1024 buffer->linear_buffer =
1025 draw->ext->image->createImage(draw->dri_screen,
1026 width, height, format,
1027 __DRI_IMAGE_USE_SHARE |
1028 __DRI_IMAGE_USE_LINEAR |
1029 __DRI_IMAGE_USE_BACKBUFFER,
1030 buffer);
1031 pixmap_buffer = buffer->linear_buffer;
1032
1033 if (!buffer->linear_buffer)
1034 goto no_linear_buffer;
1035 }
1036
1037 /* X wants the stride, so ask the image for it
1038 */
1039 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
1040 &stride))
1041 goto no_buffer_attrib;
1042
1043 buffer->pitch = stride;
1044
1045 if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
1046 &buffer_fd))
1047 goto no_buffer_attrib;
1048
1049 xcb_dri3_pixmap_from_buffer(draw->conn,
1050 (pixmap = xcb_generate_id(draw->conn)),
1051 draw->drawable,
1052 buffer->size,
1053 width, height, buffer->pitch,
1054 depth, buffer->cpp * 8,
1055 buffer_fd);
1056
1057 xcb_dri3_fence_from_fd(draw->conn,
1058 pixmap,
1059 (sync_fence = xcb_generate_id(draw->conn)),
1060 false,
1061 fence_fd);
1062
1063 buffer->pixmap = pixmap;
1064 buffer->own_pixmap = true;
1065 buffer->sync_fence = sync_fence;
1066 buffer->shm_fence = shm_fence;
1067 buffer->width = width;
1068 buffer->height = height;
1069
1070 /* Mark the buffer as idle
1071 */
1072 dri3_fence_set(buffer);
1073
1074 return buffer;
1075
1076 no_buffer_attrib:
1077 draw->ext->image->destroyImage(pixmap_buffer);
1078 no_linear_buffer:
1079 if (draw->is_different_gpu)
1080 draw->ext->image->destroyImage(buffer->image);
1081 no_image:
1082 free(buffer);
1083 no_buffer:
1084 xshmfence_unmap_shm(shm_fence);
1085 no_shm_fence:
1086 close(fence_fd);
1087 return NULL;
1088 }
1089
1090 /** dri3_update_drawable
1091 *
1092 * Called the first time we use the drawable and then
1093 * after we receive present configure notify events to
1094 * track the geometry of the drawable
1095 */
1096 static int
1097 dri3_update_drawable(__DRIdrawable *driDrawable,
1098 struct loader_dri3_drawable *draw)
1099 {
1100 if (draw->first_init) {
1101 xcb_get_geometry_cookie_t geom_cookie;
1102 xcb_get_geometry_reply_t *geom_reply;
1103 xcb_void_cookie_t cookie;
1104 xcb_generic_error_t *error;
1105 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
1106 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
1107
1108 draw->first_init = false;
1109
1110 /* Try to select for input on the window.
1111 *
1112 * If the drawable is a window, this will get our events
1113 * delivered.
1114 *
1115 * Otherwise, we'll get a BadWindow error back from this request which
1116 * will let us know that the drawable is a pixmap instead.
1117 */
1118
1119 draw->eid = xcb_generate_id(draw->conn);
1120 cookie =
1121 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
1122 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
1123 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
1124 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
1125
1126 present_capabilities_cookie =
1127 xcb_present_query_capabilities(draw->conn, draw->drawable);
1128
1129 /* Create an XCB event queue to hold present events outside of the usual
1130 * application event queue
1131 */
1132 draw->special_event = xcb_register_for_special_xge(draw->conn,
1133 &xcb_present_id,
1134 draw->eid,
1135 draw->stamp);
1136 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1137
1138 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1139
1140 if (!geom_reply)
1141 return false;
1142
1143 draw->width = geom_reply->width;
1144 draw->height = geom_reply->height;
1145 draw->depth = geom_reply->depth;
1146 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1147
1148 free(geom_reply);
1149
1150 draw->is_pixmap = false;
1151
1152 /* Check to see if our select input call failed. If it failed with a
1153 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1154 * special event queue created above and mark the drawable as a pixmap
1155 */
1156
1157 error = xcb_request_check(draw->conn, cookie);
1158
1159 present_capabilities_reply =
1160 xcb_present_query_capabilities_reply(draw->conn,
1161 present_capabilities_cookie,
1162 NULL);
1163
1164 if (present_capabilities_reply) {
1165 draw->present_capabilities = present_capabilities_reply->capabilities;
1166 free(present_capabilities_reply);
1167 } else
1168 draw->present_capabilities = 0;
1169
1170 if (error) {
1171 if (error->error_code != BadWindow) {
1172 free(error);
1173 return false;
1174 }
1175 draw->is_pixmap = true;
1176 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1177 draw->special_event = NULL;
1178 }
1179 }
1180 dri3_flush_present_events(draw);
1181 return true;
1182 }
1183
1184 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1185 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1186 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1187 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1188 */
1189 static int
1190 image_format_to_fourcc(int format)
1191 {
1192
1193 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1194 switch (format) {
1195 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1196 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1197 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1198 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1199 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1200 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1201 }
1202 return 0;
1203 }
1204
1205 __DRIimage *
1206 loader_dri3_create_image(xcb_connection_t *c,
1207 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1208 unsigned int format,
1209 __DRIscreen *dri_screen,
1210 const __DRIimageExtension *image,
1211 void *loaderPrivate)
1212 {
1213 int *fds;
1214 __DRIimage *image_planar, *ret;
1215 int stride, offset;
1216
1217 /* Get an FD for the pixmap object
1218 */
1219 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1220
1221 stride = bp_reply->stride;
1222 offset = 0;
1223
1224 /* createImageFromFds creates a wrapper __DRIimage structure which
1225 * can deal with multiple planes for things like YUV images. So, once
1226 * we've gotten the planar wrapper, pull the single plane out of it and
1227 * discard the wrapper.
1228 */
1229 image_planar = image->createImageFromFds(dri_screen,
1230 bp_reply->width,
1231 bp_reply->height,
1232 image_format_to_fourcc(format),
1233 fds, 1,
1234 &stride, &offset, loaderPrivate);
1235 close(fds[0]);
1236 if (!image_planar)
1237 return NULL;
1238
1239 ret = image->fromPlanar(image_planar, 0, loaderPrivate);
1240
1241 image->destroyImage(image_planar);
1242
1243 return ret;
1244 }
1245
1246 /** dri3_get_pixmap_buffer
1247 *
1248 * Get the DRM object for a pixmap from the X server and
1249 * wrap that with a __DRIimage structure using createImageFromFds
1250 */
1251 static struct loader_dri3_buffer *
1252 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1253 enum loader_dri3_buffer_type buffer_type,
1254 struct loader_dri3_drawable *draw)
1255 {
1256 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1257 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1258 xcb_drawable_t pixmap;
1259 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1260 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1261 xcb_sync_fence_t sync_fence;
1262 struct xshmfence *shm_fence;
1263 int fence_fd;
1264
1265 if (buffer)
1266 return buffer;
1267
1268 pixmap = draw->drawable;
1269
1270 buffer = calloc(1, sizeof *buffer);
1271 if (!buffer)
1272 goto no_buffer;
1273
1274 fence_fd = xshmfence_alloc_shm();
1275 if (fence_fd < 0)
1276 goto no_fence;
1277 shm_fence = xshmfence_map_shm(fence_fd);
1278 if (shm_fence == NULL) {
1279 close (fence_fd);
1280 goto no_fence;
1281 }
1282
1283 xcb_dri3_fence_from_fd(draw->conn,
1284 pixmap,
1285 (sync_fence = xcb_generate_id(draw->conn)),
1286 false,
1287 fence_fd);
1288
1289 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1290 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1291 if (!bp_reply)
1292 goto no_image;
1293
1294 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1295 draw->dri_screen, draw->ext->image,
1296 buffer);
1297 if (!buffer->image)
1298 goto no_image;
1299
1300 buffer->pixmap = pixmap;
1301 buffer->own_pixmap = false;
1302 buffer->width = bp_reply->width;
1303 buffer->height = bp_reply->height;
1304 buffer->shm_fence = shm_fence;
1305 buffer->sync_fence = sync_fence;
1306
1307 draw->buffers[buf_id] = buffer;
1308
1309 free(bp_reply);
1310
1311 return buffer;
1312
1313 no_image:
1314 free(bp_reply);
1315 xcb_sync_destroy_fence(draw->conn, sync_fence);
1316 xshmfence_unmap_shm(shm_fence);
1317 no_fence:
1318 free(buffer);
1319 no_buffer:
1320 return NULL;
1321 }
1322
1323 /** dri3_get_buffer
1324 *
1325 * Find a front or back buffer, allocating new ones as necessary
1326 */
1327 static struct loader_dri3_buffer *
1328 dri3_get_buffer(__DRIdrawable *driDrawable,
1329 unsigned int format,
1330 enum loader_dri3_buffer_type buffer_type,
1331 struct loader_dri3_drawable *draw)
1332 {
1333 struct loader_dri3_buffer *buffer;
1334 int buf_id;
1335
1336 if (buffer_type == loader_dri3_buffer_back) {
1337 buf_id = dri3_find_back(draw);
1338
1339 if (buf_id < 0)
1340 return NULL;
1341 } else {
1342 buf_id = LOADER_DRI3_FRONT_ID;
1343 }
1344
1345 buffer = draw->buffers[buf_id];
1346
1347 /* Allocate a new buffer if there isn't an old one, or if that
1348 * old one is the wrong size
1349 */
1350 if (!buffer || buffer->width != draw->width ||
1351 buffer->height != draw->height) {
1352 struct loader_dri3_buffer *new_buffer;
1353
1354 /* Allocate the new buffers
1355 */
1356 new_buffer = dri3_alloc_render_buffer(draw,
1357 format,
1358 draw->width,
1359 draw->height,
1360 draw->depth);
1361 if (!new_buffer)
1362 return NULL;
1363
1364 /* When resizing, copy the contents of the old buffer, waiting for that
1365 * copy to complete using our fences before proceeding
1366 */
1367 switch (buffer_type) {
1368 case loader_dri3_buffer_back:
1369 if (buffer) {
1370 if (!buffer->linear_buffer) {
1371 dri3_fence_reset(draw->conn, new_buffer);
1372 dri3_fence_await(draw->conn, buffer);
1373 dri3_copy_area(draw->conn,
1374 buffer->pixmap,
1375 new_buffer->pixmap,
1376 dri3_drawable_gc(draw),
1377 0, 0, 0, 0,
1378 draw->width, draw->height);
1379 dri3_fence_trigger(draw->conn, new_buffer);
1380 } else if (draw->vtable->in_current_context(draw)) {
1381 (void) loader_dri3_blit_image(draw,
1382 new_buffer->image,
1383 buffer->image,
1384 0, 0, draw->width, draw->height,
1385 0, 0, 0);
1386 }
1387 dri3_free_render_buffer(draw, buffer);
1388 }
1389 break;
1390 case loader_dri3_buffer_front:
1391 loader_dri3_swapbuffer_barrier(draw);
1392 dri3_fence_reset(draw->conn, new_buffer);
1393 dri3_copy_area(draw->conn,
1394 draw->drawable,
1395 new_buffer->pixmap,
1396 dri3_drawable_gc(draw),
1397 0, 0, 0, 0,
1398 draw->width, draw->height);
1399 dri3_fence_trigger(draw->conn, new_buffer);
1400
1401 if (new_buffer->linear_buffer &&
1402 draw->vtable->in_current_context(draw)) {
1403 dri3_fence_await(draw->conn, new_buffer);
1404 (void) loader_dri3_blit_image(draw,
1405 new_buffer->image,
1406 new_buffer->linear_buffer,
1407 0, 0, draw->width, draw->height,
1408 0, 0, 0);
1409 }
1410 break;
1411 }
1412 buffer = new_buffer;
1413 draw->buffers[buf_id] = buffer;
1414 }
1415 dri3_fence_await(draw->conn, buffer);
1416
1417 /*
1418 * Do we need to preserve the content of a previous buffer?
1419 *
1420 * Note that this blit is needed only to avoid a wait for a buffer that
1421 * is currently in the flip chain or being scanned out from. That's really
1422 * a tradeoff. If we're ok with the wait we can reduce the number of back
1423 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1424 * but in the latter case we must disallow page-flipping.
1425 */
1426 if (buffer_type == loader_dri3_buffer_back &&
1427 draw->cur_blit_source != -1 &&
1428 draw->buffers[draw->cur_blit_source] &&
1429 buffer != draw->buffers[draw->cur_blit_source]) {
1430
1431 struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
1432
1433 /* Avoid flushing here. This will probably benefit tiling hardware. */
1434 (void) loader_dri3_blit_image(draw,
1435 buffer->image,
1436 source->image,
1437 0, 0, draw->width, draw->height,
1438 0, 0, 0);
1439 buffer->last_swap = source->last_swap;
1440 draw->cur_blit_source = -1;
1441 }
1442 /* Return the requested buffer */
1443 return buffer;
1444 }
1445
1446 /** dri3_free_buffers
1447 *
1448 * Free the front buffer or all of the back buffers. Used
1449 * when the application changes which buffers it needs
1450 */
1451 static void
1452 dri3_free_buffers(__DRIdrawable *driDrawable,
1453 enum loader_dri3_buffer_type buffer_type,
1454 struct loader_dri3_drawable *draw)
1455 {
1456 struct loader_dri3_buffer *buffer;
1457 int first_id;
1458 int n_id;
1459 int buf_id;
1460
1461 switch (buffer_type) {
1462 case loader_dri3_buffer_back:
1463 first_id = LOADER_DRI3_BACK_ID(0);
1464 n_id = LOADER_DRI3_MAX_BACK;
1465 break;
1466 case loader_dri3_buffer_front:
1467 first_id = LOADER_DRI3_FRONT_ID;
1468 n_id = 1;
1469 }
1470
1471 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1472 buffer = draw->buffers[buf_id];
1473 if (buffer) {
1474 dri3_free_render_buffer(draw, buffer);
1475 draw->buffers[buf_id] = NULL;
1476 }
1477 }
1478 }
1479
1480 /** loader_dri3_get_buffers
1481 *
1482 * The published buffer allocation API.
1483 * Returns all of the necessary buffers, allocating
1484 * as needed.
1485 */
1486 int
1487 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1488 unsigned int format,
1489 uint32_t *stamp,
1490 void *loaderPrivate,
1491 uint32_t buffer_mask,
1492 struct __DRIimageList *buffers)
1493 {
1494 struct loader_dri3_drawable *draw = loaderPrivate;
1495 struct loader_dri3_buffer *front, *back;
1496
1497 buffers->image_mask = 0;
1498 buffers->front = NULL;
1499 buffers->back = NULL;
1500
1501 front = NULL;
1502 back = NULL;
1503
1504 if (!dri3_update_drawable(driDrawable, draw))
1505 return false;
1506
1507 /* Pixmaps always have front buffers.
1508 * Exchange swaps also mandate fake front buffers.
1509 */
1510 if (draw->is_pixmap || draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
1511 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1512
1513 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1514 /* All pixmaps are owned by the server gpu.
1515 * When we use a different gpu, we can't use the pixmap
1516 * as a buffer since it is potentially tiled in a way
1517 * our device can't understand. In this case, use
1518 * a fake front buffer. Hopefully the pixmap
1519 * content will get synced with the fake front
1520 * buffer.
1521 */
1522 if (draw->is_pixmap && !draw->is_different_gpu)
1523 front = dri3_get_pixmap_buffer(driDrawable,
1524 format,
1525 loader_dri3_buffer_front,
1526 draw);
1527 else
1528 front = dri3_get_buffer(driDrawable,
1529 format,
1530 loader_dri3_buffer_front,
1531 draw);
1532
1533 if (!front)
1534 return false;
1535 } else {
1536 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1537 draw->have_fake_front = 0;
1538 }
1539
1540 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1541 back = dri3_get_buffer(driDrawable,
1542 format,
1543 loader_dri3_buffer_back,
1544 draw);
1545 if (!back)
1546 return false;
1547 draw->have_back = 1;
1548 } else {
1549 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1550 draw->have_back = 0;
1551 }
1552
1553 if (front) {
1554 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1555 buffers->front = front->image;
1556 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1557 }
1558
1559 if (back) {
1560 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1561 buffers->back = back->image;
1562 }
1563
1564 draw->stamp = stamp;
1565
1566 return true;
1567 }
1568
1569 /** loader_dri3_update_drawable_geometry
1570 *
1571 * Get the current drawable geometry.
1572 */
1573 void
1574 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
1575 {
1576 xcb_get_geometry_cookie_t geom_cookie;
1577 xcb_get_geometry_reply_t *geom_reply;
1578
1579 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
1580
1581 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
1582
1583 if (geom_reply) {
1584 draw->width = geom_reply->width;
1585 draw->height = geom_reply->height;
1586 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1587
1588 free(geom_reply);
1589 }
1590 }
1591
1592
1593 /**
1594 * Make sure the server has flushed all pending swap buffers to hardware
1595 * for this drawable. Ideally we'd want to send an X protocol request to
1596 * have the server block our connection until the swaps are complete. That
1597 * would avoid the potential round-trip here.
1598 */
1599 void
1600 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
1601 {
1602 int64_t ust, msc, sbc;
1603
1604 (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
1605 }
1606
1607 /**
1608 * Perform any cleanup associated with a close screen operation.
1609 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1610 *
1611 * This function destroys the screen's cached swap context if any.
1612 */
1613 void
1614 loader_dri3_close_screen(__DRIscreen *dri_screen)
1615 {
1616 mtx_lock(&blit_context.mtx);
1617 if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
1618 blit_context.core->destroyContext(blit_context.ctx);
1619 blit_context.ctx = NULL;
1620 }
1621 mtx_unlock(&blit_context.mtx);
1622 }