2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
28 #include <X11/xshmfence.h>
31 #include <xcb/present.h>
33 #include <X11/Xlib-xcb.h>
35 #include "loader_dri3_helper.h"
36 #include "util/macros.h"
38 /* From xmlpool/options.h, user exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
45 * A cached blit context.
47 struct loader_dri3_blit_context
{
50 __DRIscreen
*cur_screen
;
51 const __DRIcoreExtension
*core
;
54 /* For simplicity we maintain the cache only for a single screen at a time */
55 static struct loader_dri3_blit_context blit_context
= {
56 _MTX_INITIALIZER_NP
, NULL
60 dri3_flush_present_events(struct loader_dri3_drawable
*draw
);
62 static struct loader_dri3_buffer
*
63 dri3_find_back_alloc(struct loader_dri3_drawable
*draw
);
66 * Do we have blit functionality in the image blit extension?
68 * \param draw[in] The drawable intended to blit from / to.
69 * \return true if we have blit functionality. false otherwise.
71 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable
*draw
)
73 return draw
->ext
->image
->base
.version
>= 9 &&
74 draw
->ext
->image
->blitImage
!= NULL
;
78 * Get and lock (for use with the current thread) a dri context associated
79 * with the drawable's dri screen. The context is intended to be used with
80 * the dri image extension's blitImage method.
82 * \param draw[in] Pointer to the drawable whose dri screen we want a
84 * \return A dri context or NULL if context creation failed.
86 * When the caller is done with the context (even if the context returned was
87 * NULL), the caller must call loader_dri3_blit_context_put.
/* Get and lock (for use with the current thread) a dri context associated
 * with the drawable's dri screen, intended for use with the image
 * extension's blitImage method.  Pairs with loader_dri3_blit_context_put(),
 * which the caller must invoke even if NULL is returned.
 * NOTE(review): several original source lines are missing from this view
 * (return type, the createNewContext argument list, closing braces); the
 * code fragments below are left byte-identical.
 */
90 loader_dri3_blit_context_get(struct loader_dri3_drawable
*draw
)
/* Serialize all access to the single shared blit-context cache. */
92 mtx_lock(&blit_context
.mtx
)
;
/* The cache holds a context for one screen at a time; drop the cached
 * context if it belongs to a different screen than this drawable's.
 */
94 if (blit_context
.ctx
&& blit_context
.cur_screen
!= draw
->dri_screen
) {
95 blit_context
.core
->destroyContext(blit_context
.ctx
);
96 blit_context
.ctx
= NULL
;
/* (Re)create a context for this drawable's screen and remember which
 * screen and core extension it came from.
 */
99 if (!blit_context
.ctx
) {
100 blit_context
.ctx
= draw
->ext
->core
->createNewContext(draw
->dri_screen
,
102 blit_context
.cur_screen
= draw
->dri_screen
;
103 blit_context
.core
= draw
->ext
->core
;
/* Returned with blit_context.mtx still held; may be NULL on failure. */
106 return blit_context
.ctx
;
110 * Release (for use with other threads) a dri context previously obtained using
111 * loader_dri3_blit_context_get.
114 loader_dri3_blit_context_put(void)
116 mtx_unlock(&blit_context
.mtx
);
120 * Blit (parts of) the contents of a DRI image to another dri image
122 * \param draw[in] The drawable which owns the images.
123 * \param dst[in] The destination image.
124 * \param src[in] The source image.
125 * \param dstx0[in] Start destination coordinate.
126 * \param dsty0[in] Start destination coordinate.
127 * \param width[in] Blit width.
128 * \param height[in] Blit height.
129 * \param srcx0[in] Start source coordinate.
130 * \param srcy0[in] Start source coordinate.
131 * \param flush_flag[in] Image blit flush flag.
132 * \return true iff successful.
135 loader_dri3_blit_image(struct loader_dri3_drawable
*draw
,
136 __DRIimage
*dst
, __DRIimage
*src
,
137 int dstx0
, int dsty0
, int width
, int height
,
138 int srcx0
, int srcy0
, int flush_flag
)
140 __DRIcontext
*dri_context
;
141 bool use_blit_context
= false;
143 if (!loader_dri3_have_image_blit(draw
))
146 dri_context
= draw
->vtable
->get_dri_context(draw
);
148 if (!dri_context
|| !draw
->vtable
->in_current_context(draw
)) {
149 dri_context
= loader_dri3_blit_context_get(draw
);
150 use_blit_context
= true;
151 flush_flag
|= __BLIT_FLAG_FLUSH
;
155 draw
->ext
->image
->blitImage(dri_context
, dst
, src
, dstx0
, dsty0
,
156 width
, height
, srcx0
, srcy0
,
157 width
, height
, flush_flag
);
159 if (use_blit_context
)
160 loader_dri3_blit_context_put();
162 return dri_context
!= NULL
;
166 dri3_fence_reset(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
168 xshmfence_reset(buffer
->shm_fence
);
172 dri3_fence_set(struct loader_dri3_buffer
*buffer
)
174 xshmfence_trigger(buffer
->shm_fence
);
178 dri3_fence_trigger(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
180 xcb_sync_trigger_fence(c
, buffer
->sync_fence
);
184 dri3_fence_await(xcb_connection_t
*c
, struct loader_dri3_drawable
*draw
,
185 struct loader_dri3_buffer
*buffer
)
188 xshmfence_await(buffer
->shm_fence
);
190 mtx_lock(&draw
->mtx
);
191 dri3_flush_present_events(draw
);
192 mtx_unlock(&draw
->mtx
);
197 dri3_update_num_back(struct loader_dri3_drawable
*draw
)
206 loader_dri3_set_swap_interval(struct loader_dri3_drawable
*draw
, int interval
)
208 draw
->swap_interval
= interval
;
211 /** dri3_free_render_buffer
213 * Free everything associated with one render buffer including pixmap, fence
214 * stuff and the driver image
217 dri3_free_render_buffer(struct loader_dri3_drawable
*draw
,
218 struct loader_dri3_buffer
*buffer
)
220 if (buffer
->own_pixmap
)
221 xcb_free_pixmap(draw
->conn
, buffer
->pixmap
);
222 xcb_sync_destroy_fence(draw
->conn
, buffer
->sync_fence
);
223 xshmfence_unmap_shm(buffer
->shm_fence
);
224 draw
->ext
->image
->destroyImage(buffer
->image
);
225 if (buffer
->linear_buffer
)
226 draw
->ext
->image
->destroyImage(buffer
->linear_buffer
);
231 loader_dri3_drawable_fini(struct loader_dri3_drawable
*draw
)
235 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
237 for (i
= 0; i
< ARRAY_SIZE(draw
->buffers
); i
++) {
238 if (draw
->buffers
[i
])
239 dri3_free_render_buffer(draw
, draw
->buffers
[i
]);
242 if (draw
->special_event
) {
243 xcb_void_cookie_t cookie
=
244 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
245 XCB_PRESENT_EVENT_MASK_NO_EVENT
);
247 xcb_discard_reply(draw
->conn
, cookie
.sequence
);
248 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
251 cnd_destroy(&draw
->event_cnd
);
252 mtx_destroy(&draw
->mtx
);
256 loader_dri3_drawable_init(xcb_connection_t
*conn
,
257 xcb_drawable_t drawable
,
258 __DRIscreen
*dri_screen
,
259 bool is_different_gpu
,
260 const __DRIconfig
*dri_config
,
261 struct loader_dri3_extensions
*ext
,
262 const struct loader_dri3_vtable
*vtable
,
263 struct loader_dri3_drawable
*draw
)
265 xcb_get_geometry_cookie_t cookie
;
266 xcb_get_geometry_reply_t
*reply
;
267 xcb_generic_error_t
*error
;
268 GLint vblank_mode
= DRI_CONF_VBLANK_DEF_INTERVAL_1
;
273 draw
->vtable
= vtable
;
274 draw
->drawable
= drawable
;
275 draw
->dri_screen
= dri_screen
;
276 draw
->is_different_gpu
= is_different_gpu
;
279 draw
->have_fake_front
= 0;
280 draw
->first_init
= true;
282 draw
->cur_blit_source
= -1;
283 draw
->back_format
= __DRI_IMAGE_FORMAT_NONE
;
284 mtx_init(&draw
->mtx
, mtx_plain
);
285 cnd_init(&draw
->event_cnd
);
287 if (draw
->ext
->config
)
288 draw
->ext
->config
->configQueryi(draw
->dri_screen
,
289 "vblank_mode", &vblank_mode
);
291 switch (vblank_mode
) {
292 case DRI_CONF_VBLANK_NEVER
:
293 case DRI_CONF_VBLANK_DEF_INTERVAL_0
:
296 case DRI_CONF_VBLANK_DEF_INTERVAL_1
:
297 case DRI_CONF_VBLANK_ALWAYS_SYNC
:
302 draw
->swap_interval
= swap_interval
;
304 dri3_update_num_back(draw
);
306 /* Create a new drawable */
308 draw
->ext
->image_driver
->createNewDrawable(dri_screen
,
312 if (!draw
->dri_drawable
)
315 cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
316 reply
= xcb_get_geometry_reply(draw
->conn
, cookie
, &error
);
317 if (reply
== NULL
|| error
!= NULL
) {
318 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
322 draw
->width
= reply
->width
;
323 draw
->height
= reply
->height
;
324 draw
->depth
= reply
->depth
;
325 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
328 draw
->swap_method
= __DRI_ATTRIB_SWAP_UNDEFINED
;
329 if (draw
->ext
->core
->base
.version
>= 2) {
330 (void )draw
->ext
->core
->getConfigAttrib(dri_config
,
331 __DRI_ATTRIB_SWAP_METHOD
,
336 * Make sure server has the same swap interval we do for the new
339 loader_dri3_set_swap_interval(draw
, swap_interval
);
345 * Process one Present event
348 dri3_handle_present_event(struct loader_dri3_drawable
*draw
,
349 xcb_present_generic_event_t
*ge
)
351 switch (ge
->evtype
) {
352 case XCB_PRESENT_CONFIGURE_NOTIFY
: {
353 xcb_present_configure_notify_event_t
*ce
= (void *) ge
;
355 draw
->width
= ce
->width
;
356 draw
->height
= ce
->height
;
357 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
358 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
361 case XCB_PRESENT_COMPLETE_NOTIFY
: {
362 xcb_present_complete_notify_event_t
*ce
= (void *) ge
;
364 /* Compute the processed SBC number from the received 32-bit serial number
365 * merged with the upper 32-bits of the sent 64-bit serial number while
368 if (ce
->kind
== XCB_PRESENT_COMPLETE_KIND_PIXMAP
) {
369 draw
->recv_sbc
= (draw
->send_sbc
& 0xffffffff00000000LL
) | ce
->serial
;
370 if (draw
->recv_sbc
> draw
->send_sbc
)
371 draw
->recv_sbc
-= 0x100000000;
373 case XCB_PRESENT_COMPLETE_MODE_FLIP
:
374 draw
->flipping
= true;
376 case XCB_PRESENT_COMPLETE_MODE_COPY
:
377 draw
->flipping
= false;
381 if (draw
->vtable
->show_fps
)
382 draw
->vtable
->show_fps(draw
, ce
->ust
);
386 } else if (ce
->serial
== draw
->eid
) {
387 draw
->notify_ust
= ce
->ust
;
388 draw
->notify_msc
= ce
->msc
;
392 case XCB_PRESENT_EVENT_IDLE_NOTIFY
: {
393 xcb_present_idle_notify_event_t
*ie
= (void *) ge
;
396 for (b
= 0; b
< ARRAY_SIZE(draw
->buffers
); b
++) {
397 struct loader_dri3_buffer
*buf
= draw
->buffers
[b
];
399 if (buf
&& buf
->pixmap
== ie
->pixmap
)
402 if (buf
&& draw
->num_back
<= b
&& b
< LOADER_DRI3_MAX_BACK
&&
403 draw
->cur_blit_source
!= b
&&
405 dri3_free_render_buffer(draw
, buf
);
406 draw
->buffers
[b
] = NULL
;
416 dri3_wait_for_event_locked(struct loader_dri3_drawable
*draw
)
418 xcb_generic_event_t
*ev
;
419 xcb_present_generic_event_t
*ge
;
421 xcb_flush(draw
->conn
);
423 /* Only have one thread waiting for events at a time */
424 if (draw
->has_event_waiter
) {
425 cnd_wait(&draw
->event_cnd
, &draw
->mtx
);
426 /* Another thread has updated the protected info, so retest. */
429 draw
->has_event_waiter
= true;
430 /* Allow other threads access to the drawable while we're waiting. */
431 mtx_unlock(&draw
->mtx
);
432 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
433 mtx_lock(&draw
->mtx
);
434 draw
->has_event_waiter
= false;
435 cnd_broadcast(&draw
->event_cnd
);
440 dri3_handle_present_event(draw
, ge
);
444 /** loader_dri3_wait_for_msc
446 * Get the X server to send an event when the target msc/divisor/remainder is
450 loader_dri3_wait_for_msc(struct loader_dri3_drawable
*draw
,
452 int64_t divisor
, int64_t remainder
,
453 int64_t *ust
, int64_t *msc
, int64_t *sbc
)
455 xcb_void_cookie_t cookie
= xcb_present_notify_msc(draw
->conn
,
461 xcb_generic_event_t
*ev
;
462 unsigned full_sequence
;
464 mtx_lock(&draw
->mtx
);
465 xcb_flush(draw
->conn
);
467 /* Wait for the event */
469 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
471 mtx_unlock(&draw
->mtx
);
475 full_sequence
= ev
->full_sequence
;
476 dri3_handle_present_event(draw
, (void *) ev
);
477 } while (full_sequence
!= cookie
.sequence
|| draw
->notify_msc
< target_msc
);
479 *ust
= draw
->notify_ust
;
480 *msc
= draw
->notify_msc
;
481 *sbc
= draw
->recv_sbc
;
482 mtx_unlock(&draw
->mtx
);
487 /** loader_dri3_wait_for_sbc
489 * Wait for the completed swap buffer count to reach the specified
490 * target. Presumably the application knows that this will be reached with
491 * outstanding complete events, or we're going to be here awhile.
494 loader_dri3_wait_for_sbc(struct loader_dri3_drawable
*draw
,
495 int64_t target_sbc
, int64_t *ust
,
496 int64_t *msc
, int64_t *sbc
)
498 /* From the GLX_OML_sync_control spec:
500 * "If <target_sbc> = 0, the function will block until all previous
501 * swaps requested with glXSwapBuffersMscOML for that window have
504 mtx_lock(&draw
->mtx
);
506 target_sbc
= draw
->send_sbc
;
508 while (draw
->recv_sbc
< target_sbc
) {
509 if (!dri3_wait_for_event_locked(draw
)) {
510 mtx_unlock(&draw
->mtx
);
517 *sbc
= draw
->recv_sbc
;
518 mtx_unlock(&draw
->mtx
);
522 /** loader_dri3_find_back
524 * Find an idle back buffer. If there isn't one, then
525 * wait for a present idle notify event from the X server
528 dri3_find_back(struct loader_dri3_drawable
*draw
)
533 mtx_lock(&draw
->mtx
);
534 /* Increase the likelyhood of reusing current buffer */
535 dri3_flush_present_events(draw
);
537 /* Check whether we need to reuse the current back buffer as new back.
538 * In that case, wait until it's not busy anymore.
540 dri3_update_num_back(draw
);
541 num_to_consider
= draw
->num_back
;
542 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1) {
544 draw
->cur_blit_source
= -1;
548 for (b
= 0; b
< num_to_consider
; b
++) {
549 int id
= LOADER_DRI3_BACK_ID((b
+ draw
->cur_back
) % draw
->num_back
);
550 struct loader_dri3_buffer
*buffer
= draw
->buffers
[id
];
552 if (!buffer
|| !buffer
->busy
) {
554 mtx_unlock(&draw
->mtx
);
558 if (!dri3_wait_for_event_locked(draw
)) {
559 mtx_unlock(&draw
->mtx
);
565 static xcb_gcontext_t
566 dri3_drawable_gc(struct loader_dri3_drawable
*draw
)
570 xcb_create_gc(draw
->conn
,
571 (draw
->gc
= xcb_generate_id(draw
->conn
)),
573 XCB_GC_GRAPHICS_EXPOSURES
,
580 static struct loader_dri3_buffer
*
581 dri3_back_buffer(struct loader_dri3_drawable
*draw
)
583 return draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)];
586 static struct loader_dri3_buffer
*
587 dri3_fake_front_buffer(struct loader_dri3_drawable
*draw
)
589 return draw
->buffers
[LOADER_DRI3_FRONT_ID
];
593 dri3_copy_area(xcb_connection_t
*c
,
594 xcb_drawable_t src_drawable
,
595 xcb_drawable_t dst_drawable
,
604 xcb_void_cookie_t cookie
;
606 cookie
= xcb_copy_area_checked(c
,
616 xcb_discard_reply(c
, cookie
.sequence
);
620 * Asks the driver to flush any queued work necessary for serializing with the
621 * X command stream, and optionally the slightly more strict requirement of
622 * glFlush() equivalence (which would require flushing even if nothing had
623 * been drawn to a window system framebuffer, for example).
626 loader_dri3_flush(struct loader_dri3_drawable
*draw
,
628 enum __DRI2throttleReason throttle_reason
)
630 /* NEED TO CHECK WHETHER CONTEXT IS NULL */
631 __DRIcontext
*dri_context
= draw
->vtable
->get_dri_context(draw
);
634 draw
->ext
->flush
->flush_with_flags(dri_context
, draw
->dri_drawable
,
635 flags
, throttle_reason
);
640 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable
*draw
,
642 int width
, int height
,
645 struct loader_dri3_buffer
*back
;
646 unsigned flags
= __DRI2_FLUSH_DRAWABLE
;
648 /* Check we have the right attachments */
649 if (!draw
->have_back
|| draw
->is_pixmap
)
653 flags
|= __DRI2_FLUSH_CONTEXT
;
654 loader_dri3_flush(draw
, flags
, __DRI2_THROTTLE_SWAPBUFFER
);
656 back
= dri3_find_back_alloc(draw
);
660 y
= draw
->height
- y
- height
;
662 if (draw
->is_different_gpu
) {
663 /* Update the linear buffer part of the back buffer
664 * for the dri3_copy_area operation
666 (void) loader_dri3_blit_image(draw
,
669 0, 0, back
->width
, back
->height
,
670 0, 0, __BLIT_FLAG_FLUSH
);
673 loader_dri3_swapbuffer_barrier(draw
);
674 dri3_fence_reset(draw
->conn
, back
);
675 dri3_copy_area(draw
->conn
,
678 dri3_drawable_gc(draw
),
679 x
, y
, x
, y
, width
, height
);
680 dri3_fence_trigger(draw
->conn
, back
);
681 /* Refresh the fake front (if present) after we just damaged the real
684 if (draw
->have_fake_front
&&
685 !loader_dri3_blit_image(draw
,
686 dri3_fake_front_buffer(draw
)->image
,
689 x
, y
, __BLIT_FLAG_FLUSH
) &&
690 !draw
->is_different_gpu
) {
691 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
692 dri3_copy_area(draw
->conn
,
694 dri3_fake_front_buffer(draw
)->pixmap
,
695 dri3_drawable_gc(draw
),
696 x
, y
, x
, y
, width
, height
);
697 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
698 dri3_fence_await(draw
->conn
, NULL
, dri3_fake_front_buffer(draw
));
700 dri3_fence_await(draw
->conn
, draw
, back
);
704 loader_dri3_copy_drawable(struct loader_dri3_drawable
*draw
,
708 loader_dri3_flush(draw
, __DRI2_FLUSH_DRAWABLE
, 0);
710 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
711 dri3_copy_area(draw
->conn
,
713 dri3_drawable_gc(draw
),
714 0, 0, 0, 0, draw
->width
, draw
->height
);
715 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
716 dri3_fence_await(draw
->conn
, draw
, dri3_fake_front_buffer(draw
));
720 loader_dri3_wait_x(struct loader_dri3_drawable
*draw
)
722 struct loader_dri3_buffer
*front
;
724 if (draw
== NULL
|| !draw
->have_fake_front
)
727 front
= dri3_fake_front_buffer(draw
);
729 loader_dri3_copy_drawable(draw
, front
->pixmap
, draw
->drawable
);
731 /* In the psc->is_different_gpu case, the linear buffer has been updated,
732 * but not yet the tiled buffer.
733 * Copy back to the tiled buffer we use for rendering.
734 * Note that we don't need flushing.
736 if (draw
->is_different_gpu
)
737 (void) loader_dri3_blit_image(draw
,
739 front
->linear_buffer
,
740 0, 0, front
->width
, front
->height
,
745 loader_dri3_wait_gl(struct loader_dri3_drawable
*draw
)
747 struct loader_dri3_buffer
*front
;
749 if (draw
== NULL
|| !draw
->have_fake_front
)
752 front
= dri3_fake_front_buffer(draw
);
754 /* In the psc->is_different_gpu case, we update the linear_buffer
755 * before updating the real front.
757 if (draw
->is_different_gpu
)
758 (void) loader_dri3_blit_image(draw
,
759 front
->linear_buffer
,
761 0, 0, front
->width
, front
->height
,
762 0, 0, __BLIT_FLAG_FLUSH
);
763 loader_dri3_swapbuffer_barrier(draw
);
764 loader_dri3_copy_drawable(draw
, draw
->drawable
, front
->pixmap
);
767 /** dri3_flush_present_events
769 * Process any present events that have been received from the X server
/* Process any Present events already queued on the drawable's special event
 * queue, without blocking.  Callers invoke this while holding draw->mtx.
 * NOTE(review): some original source lines are missing from this view
 * (e.g. the early return taken when another thread is already waiting for
 * events, and closing braces); code fragments are left byte-identical.
 */
772 dri3_flush_present_events(struct loader_dri3_drawable
*draw
)
774 /* Check to see if any configuration changes have occurred
775 * since we were last invoked
/* If another thread is blocked waiting for events, let it deliver them. */
777 if (draw
->has_event_waiter
)
780 if (draw
->special_event
) {
781 xcb_generic_event_t
*ev
;
/* Drain the special event queue non-blockingly and dispatch each event. */
783 while ((ev
= xcb_poll_for_special_event(draw
->conn
,
784 draw
->special_event
)) != NULL
) {
785 xcb_present_generic_event_t
*ge
= (void *) ev
;
786 dri3_handle_present_event(draw
, ge
);
791 /** loader_dri3_swap_buffers_msc
793 * Make the current back buffer visible using the present extension
796 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable
*draw
,
797 int64_t target_msc
, int64_t divisor
,
798 int64_t remainder
, unsigned flush_flags
,
801 struct loader_dri3_buffer
*back
;
803 uint32_t options
= XCB_PRESENT_OPTION_NONE
;
805 draw
->vtable
->flush_drawable(draw
, flush_flags
);
807 back
= dri3_find_back_alloc(draw
);
809 mtx_lock(&draw
->mtx
);
810 if (draw
->is_different_gpu
&& back
) {
811 /* Update the linear buffer before presenting the pixmap */
812 (void) loader_dri3_blit_image(draw
,
815 0, 0, back
->width
, back
->height
,
816 0, 0, __BLIT_FLAG_FLUSH
);
819 /* If we need to preload the new back buffer, remember the source.
820 * The force_copy parameter is used by EGL to attempt to preserve
821 * the back buffer across a call to this function.
823 if (draw
->swap_method
!= __DRI_ATTRIB_SWAP_UNDEFINED
|| force_copy
)
824 draw
->cur_blit_source
= LOADER_DRI3_BACK_ID(draw
->cur_back
);
826 /* Exchange the back and fake front. Even though the server knows about these
827 * buffers, it has no notion of back and fake front.
829 if (back
&& draw
->have_fake_front
) {
830 struct loader_dri3_buffer
*tmp
;
832 tmp
= dri3_fake_front_buffer(draw
);
833 draw
->buffers
[LOADER_DRI3_FRONT_ID
] = back
;
834 draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)] = tmp
;
836 if (draw
->swap_method
== __DRI_ATTRIB_SWAP_COPY
|| force_copy
)
837 draw
->cur_blit_source
= LOADER_DRI3_FRONT_ID
;
840 dri3_flush_present_events(draw
);
842 if (back
&& !draw
->is_pixmap
) {
843 dri3_fence_reset(draw
->conn
, back
);
845 /* Compute when we want the frame shown by taking the last known
846 * successful MSC and adding in a swap interval for each outstanding swap
847 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
851 if (target_msc
== 0 && divisor
== 0 && remainder
== 0)
852 target_msc
= draw
->msc
+ draw
->swap_interval
*
853 (draw
->send_sbc
- draw
->recv_sbc
);
854 else if (divisor
== 0 && remainder
> 0) {
855 /* From the GLX_OML_sync_control spec:
856 * "If <divisor> = 0, the swap will occur when MSC becomes
857 * greater than or equal to <target_msc>."
859 * Note that there's no mention of the remainder. The Present
860 * extension throws BadValue for remainder != 0 with divisor == 0, so
861 * just drop the passed in value.
866 /* From the GLX_EXT_swap_control spec
867 * and the EGL 1.4 spec (page 53):
869 * "If <interval> is set to a value of 0, buffer swaps are not
870 * synchronized to a video frame."
872 * Implementation note: It is possible to enable triple buffering
873 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
876 if (draw
->swap_interval
== 0)
877 options
|= XCB_PRESENT_OPTION_ASYNC
;
879 /* If we need to populate the new back, but need to reuse the back
880 * buffer slot due to lack of local blit capabilities, make sure
881 * the server doesn't flip and we deadlock.
883 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1)
884 options
|= XCB_PRESENT_OPTION_COPY
;
887 back
->last_swap
= draw
->send_sbc
;
888 xcb_present_pixmap(draw
->conn
,
891 (uint32_t) draw
->send_sbc
,
896 None
, /* target_crtc */
903 ret
= (int64_t) draw
->send_sbc
;
905 /* Schedule a server-side back-preserving blit if necessary.
906 * This happens iff all conditions below are satisfied:
907 * a) We have a fake front,
908 * b) We need to preserve the back buffer,
909 * c) We don't have local blit capabilities.
911 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1 &&
912 draw
->cur_blit_source
!= LOADER_DRI3_BACK_ID(draw
->cur_back
)) {
913 struct loader_dri3_buffer
*new_back
= dri3_back_buffer(draw
);
914 struct loader_dri3_buffer
*src
= draw
->buffers
[draw
->cur_blit_source
];
916 dri3_fence_reset(draw
->conn
, new_back
);
917 dri3_copy_area(draw
->conn
, src
->pixmap
,
919 dri3_drawable_gc(draw
),
920 0, 0, 0, 0, draw
->width
, draw
->height
);
921 dri3_fence_trigger(draw
->conn
, new_back
);
922 new_back
->last_swap
= src
->last_swap
;
925 xcb_flush(draw
->conn
);
929 mtx_unlock(&draw
->mtx
);
931 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
937 loader_dri3_query_buffer_age(struct loader_dri3_drawable
*draw
)
939 struct loader_dri3_buffer
*back
= dri3_find_back_alloc(draw
);
942 mtx_lock(&draw
->mtx
);
943 ret
= (!back
|| back
->last_swap
== 0) ? 0 :
944 draw
->send_sbc
- back
->last_swap
+ 1;
945 mtx_unlock(&draw
->mtx
);
952 * Wrapper around xcb_dri3_open
955 loader_dri3_open(xcb_connection_t
*conn
,
959 xcb_dri3_open_cookie_t cookie
;
960 xcb_dri3_open_reply_t
*reply
;
963 cookie
= xcb_dri3_open(conn
,
967 reply
= xcb_dri3_open_reply(conn
, cookie
, NULL
);
971 if (reply
->nfd
!= 1) {
976 fd
= xcb_dri3_open_reply_fds(conn
, reply
)[0];
978 fcntl(fd
, F_SETFD
, fcntl(fd
, F_GETFD
) | FD_CLOEXEC
);
984 dri3_cpp_for_format(uint32_t format
) {
986 case __DRI_IMAGE_FORMAT_R8
:
988 case __DRI_IMAGE_FORMAT_RGB565
:
989 case __DRI_IMAGE_FORMAT_GR88
:
991 case __DRI_IMAGE_FORMAT_XRGB8888
:
992 case __DRI_IMAGE_FORMAT_ARGB8888
:
993 case __DRI_IMAGE_FORMAT_ABGR8888
:
994 case __DRI_IMAGE_FORMAT_XBGR8888
:
995 case __DRI_IMAGE_FORMAT_XRGB2101010
:
996 case __DRI_IMAGE_FORMAT_ARGB2101010
:
997 case __DRI_IMAGE_FORMAT_XBGR2101010
:
998 case __DRI_IMAGE_FORMAT_ABGR2101010
:
999 case __DRI_IMAGE_FORMAT_SARGB8
:
1001 case __DRI_IMAGE_FORMAT_NONE
:
1007 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1008 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1009 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1010 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1013 image_format_to_fourcc(int format
)
1016 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1018 case __DRI_IMAGE_FORMAT_SARGB8
: return __DRI_IMAGE_FOURCC_SARGB8888
;
1019 case __DRI_IMAGE_FORMAT_RGB565
: return __DRI_IMAGE_FOURCC_RGB565
;
1020 case __DRI_IMAGE_FORMAT_XRGB8888
: return __DRI_IMAGE_FOURCC_XRGB8888
;
1021 case __DRI_IMAGE_FORMAT_ARGB8888
: return __DRI_IMAGE_FOURCC_ARGB8888
;
1022 case __DRI_IMAGE_FORMAT_ABGR8888
: return __DRI_IMAGE_FOURCC_ABGR8888
;
1023 case __DRI_IMAGE_FORMAT_XBGR8888
: return __DRI_IMAGE_FOURCC_XBGR8888
;
1024 case __DRI_IMAGE_FORMAT_XRGB2101010
: return __DRI_IMAGE_FOURCC_XRGB2101010
;
1025 case __DRI_IMAGE_FORMAT_ARGB2101010
: return __DRI_IMAGE_FOURCC_ARGB2101010
;
1026 case __DRI_IMAGE_FORMAT_XBGR2101010
: return __DRI_IMAGE_FOURCC_XBGR2101010
;
1027 case __DRI_IMAGE_FORMAT_ABGR2101010
: return __DRI_IMAGE_FOURCC_ABGR2101010
;
1032 /** loader_dri3_alloc_render_buffer
1034 * Use the driver createImage function to construct a __DRIimage, then
1035 * get a file descriptor for that and create an X pixmap from that
1037 * Allocate an xshmfence for synchronization
1039 static struct loader_dri3_buffer
*
1040 dri3_alloc_render_buffer(struct loader_dri3_drawable
*draw
, unsigned int format
,
1041 int width
, int height
, int depth
)
1043 struct loader_dri3_buffer
*buffer
;
1044 __DRIimage
*pixmap_buffer
;
1045 xcb_pixmap_t pixmap
;
1046 xcb_sync_fence_t sync_fence
;
1047 struct xshmfence
*shm_fence
;
1048 int buffer_fd
, fence_fd
;
1051 /* Create an xshmfence object and
1052 * prepare to send that to the X server
1055 fence_fd
= xshmfence_alloc_shm();
1059 shm_fence
= xshmfence_map_shm(fence_fd
);
1060 if (shm_fence
== NULL
)
1063 /* Allocate the image from the driver
1065 buffer
= calloc(1, sizeof *buffer
);
1069 buffer
->cpp
= dri3_cpp_for_format(format
);
1073 if (!draw
->is_different_gpu
) {
1074 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1077 __DRI_IMAGE_USE_SHARE
|
1078 __DRI_IMAGE_USE_SCANOUT
|
1079 __DRI_IMAGE_USE_BACKBUFFER
,
1081 pixmap_buffer
= buffer
->image
;
1086 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1095 buffer
->linear_buffer
=
1096 draw
->ext
->image
->createImage(draw
->dri_screen
,
1097 width
, height
, format
,
1098 __DRI_IMAGE_USE_SHARE
|
1099 __DRI_IMAGE_USE_LINEAR
|
1100 __DRI_IMAGE_USE_BACKBUFFER
,
1102 pixmap_buffer
= buffer
->linear_buffer
;
1104 if (!buffer
->linear_buffer
)
1105 goto no_linear_buffer
;
1108 /* X wants the stride, so ask the image for it
1110 if (!draw
->ext
->image
->queryImage(pixmap_buffer
, __DRI_IMAGE_ATTRIB_STRIDE
,
1112 goto no_buffer_attrib
;
1114 buffer
->pitch
= stride
;
1116 if (!draw
->ext
->image
->queryImage(pixmap_buffer
, __DRI_IMAGE_ATTRIB_FD
,
1118 goto no_buffer_attrib
;
1120 xcb_dri3_pixmap_from_buffer(draw
->conn
,
1121 (pixmap
= xcb_generate_id(draw
->conn
)),
1124 width
, height
, buffer
->pitch
,
1125 depth
, buffer
->cpp
* 8,
1128 xcb_dri3_fence_from_fd(draw
->conn
,
1130 (sync_fence
= xcb_generate_id(draw
->conn
)),
1134 buffer
->pixmap
= pixmap
;
1135 buffer
->own_pixmap
= true;
1136 buffer
->sync_fence
= sync_fence
;
1137 buffer
->shm_fence
= shm_fence
;
1138 buffer
->width
= width
;
1139 buffer
->height
= height
;
1141 /* Mark the buffer as idle
1143 dri3_fence_set(buffer
);
1148 draw
->ext
->image
->destroyImage(pixmap_buffer
);
1150 if (draw
->is_different_gpu
)
1151 draw
->ext
->image
->destroyImage(buffer
->image
);
1155 xshmfence_unmap_shm(shm_fence
);
1161 /** loader_dri3_update_drawable
1163 * Called the first time we use the drawable and then
1164 * after we receive present configure notify events to
1165 * track the geometry of the drawable
1168 dri3_update_drawable(__DRIdrawable
*driDrawable
,
1169 struct loader_dri3_drawable
*draw
)
1171 mtx_lock(&draw
->mtx
);
1172 if (draw
->first_init
) {
1173 xcb_get_geometry_cookie_t geom_cookie
;
1174 xcb_get_geometry_reply_t
*geom_reply
;
1175 xcb_void_cookie_t cookie
;
1176 xcb_generic_error_t
*error
;
1177 xcb_present_query_capabilities_cookie_t present_capabilities_cookie
;
1178 xcb_present_query_capabilities_reply_t
*present_capabilities_reply
;
1180 draw
->first_init
= false;
1182 /* Try to select for input on the window.
1184 * If the drawable is a window, this will get our events
1187 * Otherwise, we'll get a BadWindow error back from this request which
1188 * will let us know that the drawable is a pixmap instead.
1191 draw
->eid
= xcb_generate_id(draw
->conn
);
1193 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
1194 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY
|
1195 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY
|
1196 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY
);
1198 present_capabilities_cookie
=
1199 xcb_present_query_capabilities(draw
->conn
, draw
->drawable
);
1201 /* Create an XCB event queue to hold present events outside of the usual
1202 * application event queue
1204 draw
->special_event
= xcb_register_for_special_xge(draw
->conn
,
1208 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1210 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1213 mtx_unlock(&draw
->mtx
);
1217 draw
->width
= geom_reply
->width
;
1218 draw
->height
= geom_reply
->height
;
1219 draw
->depth
= geom_reply
->depth
;
1220 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
1224 draw
->is_pixmap
= false;
1226 /* Check to see if our select input call failed. If it failed with a
1227 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1228 * special event queue created above and mark the drawable as a pixmap
1231 error
= xcb_request_check(draw
->conn
, cookie
);
1233 present_capabilities_reply
=
1234 xcb_present_query_capabilities_reply(draw
->conn
,
1235 present_capabilities_cookie
,
1238 if (present_capabilities_reply
) {
1239 draw
->present_capabilities
= present_capabilities_reply
->capabilities
;
1240 free(present_capabilities_reply
);
1242 draw
->present_capabilities
= 0;
1245 if (error
->error_code
!= BadWindow
) {
1247 mtx_unlock(&draw
->mtx
);
1250 draw
->is_pixmap
= true;
1251 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
1252 draw
->special_event
= NULL
;
1255 dri3_flush_present_events(draw
);
1256 mtx_unlock(&draw
->mtx
);
1261 loader_dri3_create_image(xcb_connection_t
*c
,
1262 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
,
1263 unsigned int format
,
1264 __DRIscreen
*dri_screen
,
1265 const __DRIimageExtension
*image
,
1266 void *loaderPrivate
)
1269 __DRIimage
*image_planar
, *ret
;
1272 /* Get an FD for the pixmap object
1274 fds
= xcb_dri3_buffer_from_pixmap_reply_fds(c
, bp_reply
);
1276 stride
= bp_reply
->stride
;
1279 /* createImageFromFds creates a wrapper __DRIimage structure which
1280 * can deal with multiple planes for things like Yuv images. So, once
1281 * we've gotten the planar wrapper, pull the single plane out of it and
1282 * discard the wrapper.
1284 image_planar
= image
->createImageFromFds(dri_screen
,
1287 image_format_to_fourcc(format
),
1289 &stride
, &offset
, loaderPrivate
);
1294 ret
= image
->fromPlanar(image_planar
, 0, loaderPrivate
);
1299 image
->destroyImage(image_planar
);
1304 /** dri3_get_pixmap_buffer
1306 * Get the DRM object for a pixmap from the X server and
1307 * wrap that with a __DRIimage structure using createImageFromFds
1309 static struct loader_dri3_buffer
*
1310 dri3_get_pixmap_buffer(__DRIdrawable
*driDrawable
, unsigned int format
,
1311 enum loader_dri3_buffer_type buffer_type
,
1312 struct loader_dri3_drawable
*draw
)
1314 int buf_id
= loader_dri3_pixmap_buf_id(buffer_type
);
1315 struct loader_dri3_buffer
*buffer
= draw
->buffers
[buf_id
];
1316 xcb_drawable_t pixmap
;
1317 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie
;
1318 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
;
1319 xcb_sync_fence_t sync_fence
;
1320 struct xshmfence
*shm_fence
;
1322 __DRIscreen
*cur_screen
;
1327 pixmap
= draw
->drawable
;
1329 buffer
= calloc(1, sizeof *buffer
);
1333 fence_fd
= xshmfence_alloc_shm();
1336 shm_fence
= xshmfence_map_shm(fence_fd
);
1337 if (shm_fence
== NULL
) {
1342 xcb_dri3_fence_from_fd(draw
->conn
,
1344 (sync_fence
= xcb_generate_id(draw
->conn
)),
1348 bp_cookie
= xcb_dri3_buffer_from_pixmap(draw
->conn
, pixmap
);
1349 bp_reply
= xcb_dri3_buffer_from_pixmap_reply(draw
->conn
, bp_cookie
, NULL
);
1353 /* Get the currently-bound screen or revert to using the drawable's screen if
1354 * no contexts are currently bound. The latter case is at least necessary for
1355 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1357 cur_screen
= draw
->vtable
->get_dri_screen();
1359 cur_screen
= draw
->dri_screen
;
1362 buffer
->image
= loader_dri3_create_image(draw
->conn
, bp_reply
, format
,
1363 cur_screen
, draw
->ext
->image
,
1368 buffer
->pixmap
= pixmap
;
1369 buffer
->own_pixmap
= false;
1370 buffer
->width
= bp_reply
->width
;
1371 buffer
->height
= bp_reply
->height
;
1372 buffer
->shm_fence
= shm_fence
;
1373 buffer
->sync_fence
= sync_fence
;
1375 draw
->buffers
[buf_id
] = buffer
;
1383 xcb_sync_destroy_fence(draw
->conn
, sync_fence
);
1384 xshmfence_unmap_shm(shm_fence
);
1393 * Find a front or back buffer, allocating new ones as necessary
1395 static struct loader_dri3_buffer
*
1396 dri3_get_buffer(__DRIdrawable
*driDrawable
,
1397 unsigned int format
,
1398 enum loader_dri3_buffer_type buffer_type
,
1399 struct loader_dri3_drawable
*draw
)
1401 struct loader_dri3_buffer
*buffer
;
1404 if (buffer_type
== loader_dri3_buffer_back
) {
1405 draw
->back_format
= format
;
1407 buf_id
= dri3_find_back(draw
);
1412 buf_id
= LOADER_DRI3_FRONT_ID
;
1415 buffer
= draw
->buffers
[buf_id
];
1417 /* Allocate a new buffer if there isn't an old one, or if that
1418 * old one is the wrong size
1420 if (!buffer
|| buffer
->width
!= draw
->width
||
1421 buffer
->height
!= draw
->height
) {
1422 struct loader_dri3_buffer
*new_buffer
;
1424 /* Allocate the new buffers
1426 new_buffer
= dri3_alloc_render_buffer(draw
,
1434 /* When resizing, copy the contents of the old buffer, waiting for that
1435 * copy to complete using our fences before proceeding
1437 if ((buffer_type
== loader_dri3_buffer_back
||
1438 (buffer_type
== loader_dri3_buffer_front
&& draw
->have_fake_front
))
1441 /* Fill the new buffer with data from an old buffer */
1442 dri3_fence_await(draw
->conn
, draw
, buffer
);
1443 if (!loader_dri3_blit_image(draw
,
1446 0, 0, draw
->width
, draw
->height
,
1448 !buffer
->linear_buffer
) {
1449 dri3_fence_reset(draw
->conn
, new_buffer
);
1450 dri3_copy_area(draw
->conn
,
1453 dri3_drawable_gc(draw
),
1455 draw
->width
, draw
->height
);
1456 dri3_fence_trigger(draw
->conn
, new_buffer
);
1458 dri3_free_render_buffer(draw
, buffer
);
1459 } else if (buffer_type
== loader_dri3_buffer_front
) {
1460 /* Fill the new fake front with data from a real front */
1461 loader_dri3_swapbuffer_barrier(draw
);
1462 dri3_fence_reset(draw
->conn
, new_buffer
);
1463 dri3_copy_area(draw
->conn
,
1466 dri3_drawable_gc(draw
),
1468 draw
->width
, draw
->height
);
1469 dri3_fence_trigger(draw
->conn
, new_buffer
);
1471 if (new_buffer
->linear_buffer
) {
1472 dri3_fence_await(draw
->conn
, draw
, new_buffer
);
1473 (void) loader_dri3_blit_image(draw
,
1475 new_buffer
->linear_buffer
,
1476 0, 0, draw
->width
, draw
->height
,
1480 buffer
= new_buffer
;
1481 draw
->buffers
[buf_id
] = buffer
;
1483 dri3_fence_await(draw
->conn
, draw
, buffer
);
1486 * Do we need to preserve the content of a previous buffer?
1488 * Note that this blit is needed only to avoid a wait for a buffer that
1489 * is currently in the flip chain or being scanned out from. That's really
1490 * a tradeoff. If we're ok with the wait we can reduce the number of back
1491 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1492 * but in the latter case we must disallow page-flipping.
1494 if (buffer_type
== loader_dri3_buffer_back
&&
1495 draw
->cur_blit_source
!= -1 &&
1496 draw
->buffers
[draw
->cur_blit_source
] &&
1497 buffer
!= draw
->buffers
[draw
->cur_blit_source
]) {
1499 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
1501 /* Avoid flushing here. Will propably do good for tiling hardware. */
1502 (void) loader_dri3_blit_image(draw
,
1505 0, 0, draw
->width
, draw
->height
,
1507 buffer
->last_swap
= source
->last_swap
;
1508 draw
->cur_blit_source
= -1;
1510 /* Return the requested buffer */
1514 /** dri3_free_buffers
1516 * Free the front bufffer or all of the back buffers. Used
1517 * when the application changes which buffers it needs
1520 dri3_free_buffers(__DRIdrawable
*driDrawable
,
1521 enum loader_dri3_buffer_type buffer_type
,
1522 struct loader_dri3_drawable
*draw
)
1524 struct loader_dri3_buffer
*buffer
;
1529 switch (buffer_type
) {
1530 case loader_dri3_buffer_back
:
1531 first_id
= LOADER_DRI3_BACK_ID(0);
1532 n_id
= LOADER_DRI3_MAX_BACK
;
1533 draw
->cur_blit_source
= -1;
1535 case loader_dri3_buffer_front
:
1536 first_id
= LOADER_DRI3_FRONT_ID
;
1537 /* Don't free a fake front holding new backbuffer content. */
1538 n_id
= (draw
->cur_blit_source
== LOADER_DRI3_FRONT_ID
) ? 0 : 1;
1541 for (buf_id
= first_id
; buf_id
< first_id
+ n_id
; buf_id
++) {
1542 buffer
= draw
->buffers
[buf_id
];
1544 dri3_free_render_buffer(draw
, buffer
);
1545 draw
->buffers
[buf_id
] = NULL
;
1550 /** loader_dri3_get_buffers
1552 * The published buffer allocation API.
1553 * Returns all of the necessary buffers, allocating
1557 loader_dri3_get_buffers(__DRIdrawable
*driDrawable
,
1558 unsigned int format
,
1560 void *loaderPrivate
,
1561 uint32_t buffer_mask
,
1562 struct __DRIimageList
*buffers
)
1564 struct loader_dri3_drawable
*draw
= loaderPrivate
;
1565 struct loader_dri3_buffer
*front
, *back
;
1567 buffers
->image_mask
= 0;
1568 buffers
->front
= NULL
;
1569 buffers
->back
= NULL
;
1574 if (!dri3_update_drawable(driDrawable
, draw
))
1577 /* pixmaps always have front buffers.
1578 * Exchange swaps also mandate fake front buffers.
1580 if (draw
->is_pixmap
|| draw
->swap_method
== __DRI_ATTRIB_SWAP_EXCHANGE
)
1581 buffer_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
1583 if (buffer_mask
& __DRI_IMAGE_BUFFER_FRONT
) {
1584 /* All pixmaps are owned by the server gpu.
1585 * When we use a different gpu, we can't use the pixmap
1586 * as buffer since it is potentially tiled a way
1587 * our device can't understand. In this case, use
1588 * a fake front buffer. Hopefully the pixmap
1589 * content will get synced with the fake front
1592 if (draw
->is_pixmap
&& !draw
->is_different_gpu
)
1593 front
= dri3_get_pixmap_buffer(driDrawable
,
1595 loader_dri3_buffer_front
,
1598 front
= dri3_get_buffer(driDrawable
,
1600 loader_dri3_buffer_front
,
1606 dri3_free_buffers(driDrawable
, loader_dri3_buffer_front
, draw
);
1607 draw
->have_fake_front
= 0;
1610 if (buffer_mask
& __DRI_IMAGE_BUFFER_BACK
) {
1611 back
= dri3_get_buffer(driDrawable
,
1613 loader_dri3_buffer_back
,
1617 draw
->have_back
= 1;
1619 dri3_free_buffers(driDrawable
, loader_dri3_buffer_back
, draw
);
1620 draw
->have_back
= 0;
1624 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
1625 buffers
->front
= front
->image
;
1626 draw
->have_fake_front
= draw
->is_different_gpu
|| !draw
->is_pixmap
;
1630 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_BACK
;
1631 buffers
->back
= back
->image
;
1634 draw
->stamp
= stamp
;
1639 /** loader_dri3_update_drawable_geometry
1641 * Get the current drawable geometry.
1644 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable
*draw
)
1646 xcb_get_geometry_cookie_t geom_cookie
;
1647 xcb_get_geometry_reply_t
*geom_reply
;
1649 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1651 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1654 draw
->width
= geom_reply
->width
;
1655 draw
->height
= geom_reply
->height
;
1656 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
1657 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t ust, msc, sbc;

   /* Waiting for sbc 0 blocks until all queued swaps have completed. */
   (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
}
1679 * Perform any cleanup associated with a close screen operation.
1680 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1682 * This function destroys the screen's cached swap context if any.
1685 loader_dri3_close_screen(__DRIscreen
*dri_screen
)
1687 mtx_lock(&blit_context
.mtx
);
1688 if (blit_context
.ctx
&& blit_context
.cur_screen
== dri_screen
) {
1689 blit_context
.core
->destroyContext(blit_context
.ctx
);
1690 blit_context
.ctx
= NULL
;
1692 mtx_unlock(&blit_context
.mtx
);
1696 * Find a backbuffer slot - potentially allocating a back buffer
1698 * \param draw[in,out] Pointer to the drawable for which to find back.
1699 * \return Pointer to a new back buffer or NULL if allocation failed or was
1702 * Find a potentially new back buffer, and if it's not been allocated yet and
1703 * in addition needs initializing, then try to allocate and initialize it.
1706 static struct loader_dri3_buffer
*
1707 dri3_find_back_alloc(struct loader_dri3_drawable
*draw
)
1709 struct loader_dri3_buffer
*back
;
1712 id
= dri3_find_back(draw
);
1716 back
= draw
->buffers
[id
];
1717 /* Allocate a new back if we haven't got one */
1718 if (!back
&& draw
->back_format
!= __DRI_IMAGE_FORMAT_NONE
&&
1719 dri3_update_drawable(draw
->dri_drawable
, draw
))
1720 back
= dri3_alloc_render_buffer(draw
, draw
->back_format
,
1721 draw
->width
, draw
->height
, draw
->depth
);
1726 draw
->buffers
[id
] = back
;
1728 /* If necessary, prefill the back with data according to swap_method mode. */
1729 if (draw
->cur_blit_source
!= -1 &&
1730 draw
->buffers
[draw
->cur_blit_source
] &&
1731 back
!= draw
->buffers
[draw
->cur_blit_source
]) {
1732 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
1734 dri3_fence_await(draw
->conn
, draw
, source
);
1735 dri3_fence_await(draw
->conn
, draw
, back
);
1736 (void) loader_dri3_blit_image(draw
,
1739 0, 0, draw
->width
, draw
->height
,
1741 back
->last_swap
= source
->last_swap
;
1742 draw
->cur_blit_source
= -1;