2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
29 #include <X11/xshmfence.h>
32 #include <xcb/present.h>
34 #include <X11/Xlib-xcb.h>
36 #include "loader_dri3_helper.h"
37 #include "util/macros.h"
38 #include "drm_fourcc.h"
40 /* From xmlpool/options.h, user exposed so should be stable */
41 #define DRI_CONF_VBLANK_NEVER 0
42 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
43 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
44 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
47 * A cached blit context.
49 struct loader_dri3_blit_context
{
52 __DRIscreen
*cur_screen
;
53 const __DRIcoreExtension
*core
;
56 /* For simplicity we maintain the cache only for a single screen at a time */
57 static struct loader_dri3_blit_context blit_context
= {
58 _MTX_INITIALIZER_NP
, NULL
62 dri3_flush_present_events(struct loader_dri3_drawable
*draw
);
64 static struct loader_dri3_buffer
*
65 dri3_find_back_alloc(struct loader_dri3_drawable
*draw
);
68 * Do we have blit functionality in the image blit extension?
70 * \param draw[in] The drawable intended to blit from / to.
71 * \return true if we have blit functionality. false otherwise.
73 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable
*draw
)
75 return draw
->ext
->image
->base
.version
>= 9 &&
76 draw
->ext
->image
->blitImage
!= NULL
;
80 * Get and lock (for use with the current thread) a dri context associated
81 * with the drawable's dri screen. The context is intended to be used with
82 * the dri image extension's blitImage method.
84 * \param draw[in] Pointer to the drawable whose dri screen we want a
86 * \return A dri context or NULL if context creation failed.
88 * When the caller is done with the context (even if the context returned was
89 * NULL), the caller must call loader_dri3_blit_context_put.
92 loader_dri3_blit_context_get(struct loader_dri3_drawable
*draw
)
94 mtx_lock(&blit_context
.mtx
);
96 if (blit_context
.ctx
&& blit_context
.cur_screen
!= draw
->dri_screen
) {
97 blit_context
.core
->destroyContext(blit_context
.ctx
);
98 blit_context
.ctx
= NULL
;
101 if (!blit_context
.ctx
) {
102 blit_context
.ctx
= draw
->ext
->core
->createNewContext(draw
->dri_screen
,
104 blit_context
.cur_screen
= draw
->dri_screen
;
105 blit_context
.core
= draw
->ext
->core
;
108 return blit_context
.ctx
;
112 * Release (for use with other threads) a dri context previously obtained using
113 * loader_dri3_blit_context_get.
116 loader_dri3_blit_context_put(void)
118 mtx_unlock(&blit_context
.mtx
);
122 * Blit (parts of) the contents of a DRI image to another dri image
124 * \param draw[in] The drawable which owns the images.
125 * \param dst[in] The destination image.
126 * \param src[in] The source image.
127 * \param dstx0[in] Start destination coordinate.
128 * \param dsty0[in] Start destination coordinate.
129 * \param width[in] Blit width.
130 * \param height[in] Blit height.
131 * \param srcx0[in] Start source coordinate.
132 * \param srcy0[in] Start source coordinate.
133 * \param flush_flag[in] Image blit flush flag.
134 * \return true iff successful.
137 loader_dri3_blit_image(struct loader_dri3_drawable
*draw
,
138 __DRIimage
*dst
, __DRIimage
*src
,
139 int dstx0
, int dsty0
, int width
, int height
,
140 int srcx0
, int srcy0
, int flush_flag
)
142 __DRIcontext
*dri_context
;
143 bool use_blit_context
= false;
145 if (!loader_dri3_have_image_blit(draw
))
148 dri_context
= draw
->vtable
->get_dri_context(draw
);
150 if (!dri_context
|| !draw
->vtable
->in_current_context(draw
)) {
151 dri_context
= loader_dri3_blit_context_get(draw
);
152 use_blit_context
= true;
153 flush_flag
|= __BLIT_FLAG_FLUSH
;
157 draw
->ext
->image
->blitImage(dri_context
, dst
, src
, dstx0
, dsty0
,
158 width
, height
, srcx0
, srcy0
,
159 width
, height
, flush_flag
);
161 if (use_blit_context
)
162 loader_dri3_blit_context_put();
164 return dri_context
!= NULL
;
168 dri3_fence_reset(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
170 xshmfence_reset(buffer
->shm_fence
);
174 dri3_fence_set(struct loader_dri3_buffer
*buffer
)
176 xshmfence_trigger(buffer
->shm_fence
);
180 dri3_fence_trigger(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
182 xcb_sync_trigger_fence(c
, buffer
->sync_fence
);
186 dri3_fence_await(xcb_connection_t
*c
, struct loader_dri3_drawable
*draw
,
187 struct loader_dri3_buffer
*buffer
)
190 xshmfence_await(buffer
->shm_fence
);
192 mtx_lock(&draw
->mtx
);
193 dri3_flush_present_events(draw
);
194 mtx_unlock(&draw
->mtx
);
199 dri3_update_num_back(struct loader_dri3_drawable
*draw
)
201 if (draw
->last_present_mode
== XCB_PRESENT_COMPLETE_MODE_FLIP
)
208 loader_dri3_set_swap_interval(struct loader_dri3_drawable
*draw
, int interval
)
210 draw
->swap_interval
= interval
;
213 /** dri3_free_render_buffer
215 * Free everything associated with one render buffer including pixmap, fence
216 * stuff and the driver image
219 dri3_free_render_buffer(struct loader_dri3_drawable
*draw
,
220 struct loader_dri3_buffer
*buffer
)
222 if (buffer
->own_pixmap
)
223 xcb_free_pixmap(draw
->conn
, buffer
->pixmap
);
224 xcb_sync_destroy_fence(draw
->conn
, buffer
->sync_fence
);
225 xshmfence_unmap_shm(buffer
->shm_fence
);
226 draw
->ext
->image
->destroyImage(buffer
->image
);
227 if (buffer
->linear_buffer
)
228 draw
->ext
->image
->destroyImage(buffer
->linear_buffer
);
233 loader_dri3_drawable_fini(struct loader_dri3_drawable
*draw
)
237 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
239 for (i
= 0; i
< ARRAY_SIZE(draw
->buffers
); i
++) {
240 if (draw
->buffers
[i
])
241 dri3_free_render_buffer(draw
, draw
->buffers
[i
]);
244 if (draw
->special_event
) {
245 xcb_void_cookie_t cookie
=
246 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
247 XCB_PRESENT_EVENT_MASK_NO_EVENT
);
249 xcb_discard_reply(draw
->conn
, cookie
.sequence
);
250 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
253 cnd_destroy(&draw
->event_cnd
);
254 mtx_destroy(&draw
->mtx
);
258 loader_dri3_drawable_init(xcb_connection_t
*conn
,
259 xcb_drawable_t drawable
,
260 __DRIscreen
*dri_screen
,
261 bool is_different_gpu
,
262 bool multiplanes_available
,
263 const __DRIconfig
*dri_config
,
264 struct loader_dri3_extensions
*ext
,
265 const struct loader_dri3_vtable
*vtable
,
266 struct loader_dri3_drawable
*draw
)
268 xcb_get_geometry_cookie_t cookie
;
269 xcb_get_geometry_reply_t
*reply
;
270 xcb_generic_error_t
*error
;
271 GLint vblank_mode
= DRI_CONF_VBLANK_DEF_INTERVAL_1
;
276 draw
->vtable
= vtable
;
277 draw
->drawable
= drawable
;
278 draw
->dri_screen
= dri_screen
;
279 draw
->is_different_gpu
= is_different_gpu
;
280 draw
->multiplanes_available
= multiplanes_available
;
283 draw
->have_fake_front
= 0;
284 draw
->first_init
= true;
286 draw
->cur_blit_source
= -1;
287 draw
->back_format
= __DRI_IMAGE_FORMAT_NONE
;
288 mtx_init(&draw
->mtx
, mtx_plain
);
289 cnd_init(&draw
->event_cnd
);
291 if (draw
->ext
->config
)
292 draw
->ext
->config
->configQueryi(draw
->dri_screen
,
293 "vblank_mode", &vblank_mode
);
295 switch (vblank_mode
) {
296 case DRI_CONF_VBLANK_NEVER
:
297 case DRI_CONF_VBLANK_DEF_INTERVAL_0
:
300 case DRI_CONF_VBLANK_DEF_INTERVAL_1
:
301 case DRI_CONF_VBLANK_ALWAYS_SYNC
:
306 draw
->swap_interval
= swap_interval
;
308 dri3_update_num_back(draw
);
310 /* Create a new drawable */
312 draw
->ext
->image_driver
->createNewDrawable(dri_screen
,
316 if (!draw
->dri_drawable
)
319 cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
320 reply
= xcb_get_geometry_reply(draw
->conn
, cookie
, &error
);
321 if (reply
== NULL
|| error
!= NULL
) {
322 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
326 draw
->width
= reply
->width
;
327 draw
->height
= reply
->height
;
328 draw
->depth
= reply
->depth
;
329 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
332 draw
->swap_method
= __DRI_ATTRIB_SWAP_UNDEFINED
;
333 if (draw
->ext
->core
->base
.version
>= 2) {
334 (void )draw
->ext
->core
->getConfigAttrib(dri_config
,
335 __DRI_ATTRIB_SWAP_METHOD
,
340 * Make sure server has the same swap interval we do for the new
343 loader_dri3_set_swap_interval(draw
, swap_interval
);
349 * Process one Present event
352 dri3_handle_present_event(struct loader_dri3_drawable
*draw
,
353 xcb_present_generic_event_t
*ge
)
355 switch (ge
->evtype
) {
356 case XCB_PRESENT_CONFIGURE_NOTIFY
: {
357 xcb_present_configure_notify_event_t
*ce
= (void *) ge
;
359 draw
->width
= ce
->width
;
360 draw
->height
= ce
->height
;
361 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
362 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
365 case XCB_PRESENT_COMPLETE_NOTIFY
: {
366 xcb_present_complete_notify_event_t
*ce
= (void *) ge
;
368 /* Compute the processed SBC number from the received 32-bit serial number
369 * merged with the upper 32-bits of the sent 64-bit serial number while
372 if (ce
->kind
== XCB_PRESENT_COMPLETE_KIND_PIXMAP
) {
373 draw
->recv_sbc
= (draw
->send_sbc
& 0xffffffff00000000LL
) | ce
->serial
;
374 if (draw
->recv_sbc
> draw
->send_sbc
)
375 draw
->recv_sbc
-= 0x100000000;
377 draw
->last_present_mode
= ce
->mode
;
379 if (draw
->vtable
->show_fps
)
380 draw
->vtable
->show_fps(draw
, ce
->ust
);
384 } else if (ce
->serial
== draw
->eid
) {
385 draw
->notify_ust
= ce
->ust
;
386 draw
->notify_msc
= ce
->msc
;
390 case XCB_PRESENT_EVENT_IDLE_NOTIFY
: {
391 xcb_present_idle_notify_event_t
*ie
= (void *) ge
;
394 for (b
= 0; b
< ARRAY_SIZE(draw
->buffers
); b
++) {
395 struct loader_dri3_buffer
*buf
= draw
->buffers
[b
];
397 if (buf
&& buf
->pixmap
== ie
->pixmap
)
400 if (buf
&& draw
->num_back
<= b
&& b
< LOADER_DRI3_MAX_BACK
&&
401 draw
->cur_blit_source
!= b
&&
403 dri3_free_render_buffer(draw
, buf
);
404 draw
->buffers
[b
] = NULL
;
414 dri3_wait_for_event_locked(struct loader_dri3_drawable
*draw
)
416 xcb_generic_event_t
*ev
;
417 xcb_present_generic_event_t
*ge
;
419 xcb_flush(draw
->conn
);
421 /* Only have one thread waiting for events at a time */
422 if (draw
->has_event_waiter
) {
423 cnd_wait(&draw
->event_cnd
, &draw
->mtx
);
424 /* Another thread has updated the protected info, so retest. */
427 draw
->has_event_waiter
= true;
428 /* Allow other threads access to the drawable while we're waiting. */
429 mtx_unlock(&draw
->mtx
);
430 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
431 mtx_lock(&draw
->mtx
);
432 draw
->has_event_waiter
= false;
433 cnd_broadcast(&draw
->event_cnd
);
438 dri3_handle_present_event(draw
, ge
);
442 /** loader_dri3_wait_for_msc
444 * Get the X server to send an event when the target msc/divisor/remainder is
448 loader_dri3_wait_for_msc(struct loader_dri3_drawable
*draw
,
450 int64_t divisor
, int64_t remainder
,
451 int64_t *ust
, int64_t *msc
, int64_t *sbc
)
453 xcb_void_cookie_t cookie
= xcb_present_notify_msc(draw
->conn
,
459 xcb_generic_event_t
*ev
;
460 unsigned full_sequence
;
462 mtx_lock(&draw
->mtx
);
463 xcb_flush(draw
->conn
);
465 /* Wait for the event */
467 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
469 mtx_unlock(&draw
->mtx
);
473 full_sequence
= ev
->full_sequence
;
474 dri3_handle_present_event(draw
, (void *) ev
);
475 } while (full_sequence
!= cookie
.sequence
|| draw
->notify_msc
< target_msc
);
477 *ust
= draw
->notify_ust
;
478 *msc
= draw
->notify_msc
;
479 *sbc
= draw
->recv_sbc
;
480 mtx_unlock(&draw
->mtx
);
485 /** loader_dri3_wait_for_sbc
487 * Wait for the completed swap buffer count to reach the specified
488 * target. Presumably the application knows that this will be reached with
489 * outstanding complete events, or we're going to be here awhile.
492 loader_dri3_wait_for_sbc(struct loader_dri3_drawable
*draw
,
493 int64_t target_sbc
, int64_t *ust
,
494 int64_t *msc
, int64_t *sbc
)
496 /* From the GLX_OML_sync_control spec:
498 * "If <target_sbc> = 0, the function will block until all previous
499 * swaps requested with glXSwapBuffersMscOML for that window have
502 mtx_lock(&draw
->mtx
);
504 target_sbc
= draw
->send_sbc
;
506 while (draw
->recv_sbc
< target_sbc
) {
507 if (!dri3_wait_for_event_locked(draw
)) {
508 mtx_unlock(&draw
->mtx
);
515 *sbc
= draw
->recv_sbc
;
516 mtx_unlock(&draw
->mtx
);
520 /** loader_dri3_find_back
522 * Find an idle back buffer. If there isn't one, then
523 * wait for a present idle notify event from the X server
526 dri3_find_back(struct loader_dri3_drawable
*draw
)
531 mtx_lock(&draw
->mtx
);
532 /* Increase the likelyhood of reusing current buffer */
533 dri3_flush_present_events(draw
);
535 /* Check whether we need to reuse the current back buffer as new back.
536 * In that case, wait until it's not busy anymore.
538 dri3_update_num_back(draw
);
539 num_to_consider
= draw
->num_back
;
540 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1) {
542 draw
->cur_blit_source
= -1;
546 for (b
= 0; b
< num_to_consider
; b
++) {
547 int id
= LOADER_DRI3_BACK_ID((b
+ draw
->cur_back
) % draw
->num_back
);
548 struct loader_dri3_buffer
*buffer
= draw
->buffers
[id
];
550 if (!buffer
|| !buffer
->busy
) {
552 mtx_unlock(&draw
->mtx
);
556 if (!dri3_wait_for_event_locked(draw
)) {
557 mtx_unlock(&draw
->mtx
);
563 static xcb_gcontext_t
564 dri3_drawable_gc(struct loader_dri3_drawable
*draw
)
568 xcb_create_gc(draw
->conn
,
569 (draw
->gc
= xcb_generate_id(draw
->conn
)),
571 XCB_GC_GRAPHICS_EXPOSURES
,
578 static struct loader_dri3_buffer
*
579 dri3_back_buffer(struct loader_dri3_drawable
*draw
)
581 return draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)];
584 static struct loader_dri3_buffer
*
585 dri3_fake_front_buffer(struct loader_dri3_drawable
*draw
)
587 return draw
->buffers
[LOADER_DRI3_FRONT_ID
];
591 dri3_copy_area(xcb_connection_t
*c
,
592 xcb_drawable_t src_drawable
,
593 xcb_drawable_t dst_drawable
,
602 xcb_void_cookie_t cookie
;
604 cookie
= xcb_copy_area_checked(c
,
614 xcb_discard_reply(c
, cookie
.sequence
);
618 * Asks the driver to flush any queued work necessary for serializing with the
619 * X command stream, and optionally the slightly more strict requirement of
620 * glFlush() equivalence (which would require flushing even if nothing had
621 * been drawn to a window system framebuffer, for example).
624 loader_dri3_flush(struct loader_dri3_drawable
*draw
,
626 enum __DRI2throttleReason throttle_reason
)
628 /* NEED TO CHECK WHETHER CONTEXT IS NULL */
629 __DRIcontext
*dri_context
= draw
->vtable
->get_dri_context(draw
);
632 draw
->ext
->flush
->flush_with_flags(dri_context
, draw
->dri_drawable
,
633 flags
, throttle_reason
);
638 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable
*draw
,
640 int width
, int height
,
643 struct loader_dri3_buffer
*back
;
644 unsigned flags
= __DRI2_FLUSH_DRAWABLE
;
646 /* Check we have the right attachments */
647 if (!draw
->have_back
|| draw
->is_pixmap
)
651 flags
|= __DRI2_FLUSH_CONTEXT
;
652 loader_dri3_flush(draw
, flags
, __DRI2_THROTTLE_SWAPBUFFER
);
654 back
= dri3_find_back_alloc(draw
);
658 y
= draw
->height
- y
- height
;
660 if (draw
->is_different_gpu
) {
661 /* Update the linear buffer part of the back buffer
662 * for the dri3_copy_area operation
664 (void) loader_dri3_blit_image(draw
,
667 0, 0, back
->width
, back
->height
,
668 0, 0, __BLIT_FLAG_FLUSH
);
671 loader_dri3_swapbuffer_barrier(draw
);
672 dri3_fence_reset(draw
->conn
, back
);
673 dri3_copy_area(draw
->conn
,
676 dri3_drawable_gc(draw
),
677 x
, y
, x
, y
, width
, height
);
678 dri3_fence_trigger(draw
->conn
, back
);
679 /* Refresh the fake front (if present) after we just damaged the real
682 if (draw
->have_fake_front
&&
683 !loader_dri3_blit_image(draw
,
684 dri3_fake_front_buffer(draw
)->image
,
687 x
, y
, __BLIT_FLAG_FLUSH
) &&
688 !draw
->is_different_gpu
) {
689 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
690 dri3_copy_area(draw
->conn
,
692 dri3_fake_front_buffer(draw
)->pixmap
,
693 dri3_drawable_gc(draw
),
694 x
, y
, x
, y
, width
, height
);
695 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
696 dri3_fence_await(draw
->conn
, NULL
, dri3_fake_front_buffer(draw
));
698 dri3_fence_await(draw
->conn
, draw
, back
);
702 loader_dri3_copy_drawable(struct loader_dri3_drawable
*draw
,
706 loader_dri3_flush(draw
, __DRI2_FLUSH_DRAWABLE
, 0);
708 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
709 dri3_copy_area(draw
->conn
,
711 dri3_drawable_gc(draw
),
712 0, 0, 0, 0, draw
->width
, draw
->height
);
713 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
714 dri3_fence_await(draw
->conn
, draw
, dri3_fake_front_buffer(draw
));
718 loader_dri3_wait_x(struct loader_dri3_drawable
*draw
)
720 struct loader_dri3_buffer
*front
;
722 if (draw
== NULL
|| !draw
->have_fake_front
)
725 front
= dri3_fake_front_buffer(draw
);
727 loader_dri3_copy_drawable(draw
, front
->pixmap
, draw
->drawable
);
729 /* In the psc->is_different_gpu case, the linear buffer has been updated,
730 * but not yet the tiled buffer.
731 * Copy back to the tiled buffer we use for rendering.
732 * Note that we don't need flushing.
734 if (draw
->is_different_gpu
)
735 (void) loader_dri3_blit_image(draw
,
737 front
->linear_buffer
,
738 0, 0, front
->width
, front
->height
,
743 loader_dri3_wait_gl(struct loader_dri3_drawable
*draw
)
745 struct loader_dri3_buffer
*front
;
747 if (draw
== NULL
|| !draw
->have_fake_front
)
750 front
= dri3_fake_front_buffer(draw
);
752 /* In the psc->is_different_gpu case, we update the linear_buffer
753 * before updating the real front.
755 if (draw
->is_different_gpu
)
756 (void) loader_dri3_blit_image(draw
,
757 front
->linear_buffer
,
759 0, 0, front
->width
, front
->height
,
760 0, 0, __BLIT_FLAG_FLUSH
);
761 loader_dri3_swapbuffer_barrier(draw
);
762 loader_dri3_copy_drawable(draw
, draw
->drawable
, front
->pixmap
);
765 /** dri3_flush_present_events
767 * Process any present events that have been received from the X server
770 dri3_flush_present_events(struct loader_dri3_drawable
*draw
)
772 /* Check to see if any configuration changes have occurred
773 * since we were last invoked
775 if (draw
->has_event_waiter
)
778 if (draw
->special_event
) {
779 xcb_generic_event_t
*ev
;
781 while ((ev
= xcb_poll_for_special_event(draw
->conn
,
782 draw
->special_event
)) != NULL
) {
783 xcb_present_generic_event_t
*ge
= (void *) ev
;
784 dri3_handle_present_event(draw
, ge
);
789 /** loader_dri3_swap_buffers_msc
791 * Make the current back buffer visible using the present extension
794 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable
*draw
,
795 int64_t target_msc
, int64_t divisor
,
796 int64_t remainder
, unsigned flush_flags
,
799 struct loader_dri3_buffer
*back
;
801 uint32_t options
= XCB_PRESENT_OPTION_NONE
;
803 draw
->vtable
->flush_drawable(draw
, flush_flags
);
805 back
= dri3_find_back_alloc(draw
);
807 mtx_lock(&draw
->mtx
);
808 if (draw
->is_different_gpu
&& back
) {
809 /* Update the linear buffer before presenting the pixmap */
810 (void) loader_dri3_blit_image(draw
,
813 0, 0, back
->width
, back
->height
,
814 0, 0, __BLIT_FLAG_FLUSH
);
817 /* If we need to preload the new back buffer, remember the source.
818 * The force_copy parameter is used by EGL to attempt to preserve
819 * the back buffer across a call to this function.
821 if (draw
->swap_method
!= __DRI_ATTRIB_SWAP_UNDEFINED
|| force_copy
)
822 draw
->cur_blit_source
= LOADER_DRI3_BACK_ID(draw
->cur_back
);
824 /* Exchange the back and fake front. Even though the server knows about these
825 * buffers, it has no notion of back and fake front.
827 if (back
&& draw
->have_fake_front
) {
828 struct loader_dri3_buffer
*tmp
;
830 tmp
= dri3_fake_front_buffer(draw
);
831 draw
->buffers
[LOADER_DRI3_FRONT_ID
] = back
;
832 draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)] = tmp
;
834 if (draw
->swap_method
== __DRI_ATTRIB_SWAP_COPY
|| force_copy
)
835 draw
->cur_blit_source
= LOADER_DRI3_FRONT_ID
;
838 dri3_flush_present_events(draw
);
840 if (back
&& !draw
->is_pixmap
) {
841 dri3_fence_reset(draw
->conn
, back
);
843 /* Compute when we want the frame shown by taking the last known
844 * successful MSC and adding in a swap interval for each outstanding swap
845 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
849 if (target_msc
== 0 && divisor
== 0 && remainder
== 0)
850 target_msc
= draw
->msc
+ draw
->swap_interval
*
851 (draw
->send_sbc
- draw
->recv_sbc
);
852 else if (divisor
== 0 && remainder
> 0) {
853 /* From the GLX_OML_sync_control spec:
854 * "If <divisor> = 0, the swap will occur when MSC becomes
855 * greater than or equal to <target_msc>."
857 * Note that there's no mention of the remainder. The Present
858 * extension throws BadValue for remainder != 0 with divisor == 0, so
859 * just drop the passed in value.
864 /* From the GLX_EXT_swap_control spec
865 * and the EGL 1.4 spec (page 53):
867 * "If <interval> is set to a value of 0, buffer swaps are not
868 * synchronized to a video frame."
870 * Implementation note: It is possible to enable triple buffering
871 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
874 if (draw
->swap_interval
== 0)
875 options
|= XCB_PRESENT_OPTION_ASYNC
;
877 /* If we need to populate the new back, but need to reuse the back
878 * buffer slot due to lack of local blit capabilities, make sure
879 * the server doesn't flip and we deadlock.
881 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1)
882 options
|= XCB_PRESENT_OPTION_COPY
;
885 back
->last_swap
= draw
->send_sbc
;
886 xcb_present_pixmap(draw
->conn
,
889 (uint32_t) draw
->send_sbc
,
894 None
, /* target_crtc */
901 ret
= (int64_t) draw
->send_sbc
;
903 /* Schedule a server-side back-preserving blit if necessary.
904 * This happens iff all conditions below are satisfied:
905 * a) We have a fake front,
906 * b) We need to preserve the back buffer,
907 * c) We don't have local blit capabilities.
909 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1 &&
910 draw
->cur_blit_source
!= LOADER_DRI3_BACK_ID(draw
->cur_back
)) {
911 struct loader_dri3_buffer
*new_back
= dri3_back_buffer(draw
);
912 struct loader_dri3_buffer
*src
= draw
->buffers
[draw
->cur_blit_source
];
914 dri3_fence_reset(draw
->conn
, new_back
);
915 dri3_copy_area(draw
->conn
, src
->pixmap
,
917 dri3_drawable_gc(draw
),
918 0, 0, 0, 0, draw
->width
, draw
->height
);
919 dri3_fence_trigger(draw
->conn
, new_back
);
920 new_back
->last_swap
= src
->last_swap
;
923 xcb_flush(draw
->conn
);
927 mtx_unlock(&draw
->mtx
);
929 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
935 loader_dri3_query_buffer_age(struct loader_dri3_drawable
*draw
)
937 struct loader_dri3_buffer
*back
= dri3_find_back_alloc(draw
);
940 mtx_lock(&draw
->mtx
);
941 ret
= (!back
|| back
->last_swap
== 0) ? 0 :
942 draw
->send_sbc
- back
->last_swap
+ 1;
943 mtx_unlock(&draw
->mtx
);
950 * Wrapper around xcb_dri3_open
953 loader_dri3_open(xcb_connection_t
*conn
,
957 xcb_dri3_open_cookie_t cookie
;
958 xcb_dri3_open_reply_t
*reply
;
961 cookie
= xcb_dri3_open(conn
,
965 reply
= xcb_dri3_open_reply(conn
, cookie
, NULL
);
969 if (reply
->nfd
!= 1) {
974 fd
= xcb_dri3_open_reply_fds(conn
, reply
)[0];
976 fcntl(fd
, F_SETFD
, fcntl(fd
, F_GETFD
) | FD_CLOEXEC
);
982 dri3_cpp_for_format(uint32_t format
) {
984 case __DRI_IMAGE_FORMAT_R8
:
986 case __DRI_IMAGE_FORMAT_RGB565
:
987 case __DRI_IMAGE_FORMAT_GR88
:
989 case __DRI_IMAGE_FORMAT_XRGB8888
:
990 case __DRI_IMAGE_FORMAT_ARGB8888
:
991 case __DRI_IMAGE_FORMAT_ABGR8888
:
992 case __DRI_IMAGE_FORMAT_XBGR8888
:
993 case __DRI_IMAGE_FORMAT_XRGB2101010
:
994 case __DRI_IMAGE_FORMAT_ARGB2101010
:
995 case __DRI_IMAGE_FORMAT_XBGR2101010
:
996 case __DRI_IMAGE_FORMAT_ABGR2101010
:
997 case __DRI_IMAGE_FORMAT_SARGB8
:
999 case __DRI_IMAGE_FORMAT_NONE
:
1005 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1006 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1007 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1008 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1011 image_format_to_fourcc(int format
)
1014 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1016 case __DRI_IMAGE_FORMAT_SARGB8
: return __DRI_IMAGE_FOURCC_SARGB8888
;
1017 case __DRI_IMAGE_FORMAT_RGB565
: return __DRI_IMAGE_FOURCC_RGB565
;
1018 case __DRI_IMAGE_FORMAT_XRGB8888
: return __DRI_IMAGE_FOURCC_XRGB8888
;
1019 case __DRI_IMAGE_FORMAT_ARGB8888
: return __DRI_IMAGE_FOURCC_ARGB8888
;
1020 case __DRI_IMAGE_FORMAT_ABGR8888
: return __DRI_IMAGE_FOURCC_ABGR8888
;
1021 case __DRI_IMAGE_FORMAT_XBGR8888
: return __DRI_IMAGE_FOURCC_XBGR8888
;
1022 case __DRI_IMAGE_FORMAT_XRGB2101010
: return __DRI_IMAGE_FOURCC_XRGB2101010
;
1023 case __DRI_IMAGE_FORMAT_ARGB2101010
: return __DRI_IMAGE_FOURCC_ARGB2101010
;
1024 case __DRI_IMAGE_FORMAT_XBGR2101010
: return __DRI_IMAGE_FOURCC_XBGR2101010
;
1025 case __DRI_IMAGE_FORMAT_ABGR2101010
: return __DRI_IMAGE_FOURCC_ABGR2101010
;
1031 has_supported_modifier(struct loader_dri3_drawable
*draw
, unsigned int format
,
1032 uint64_t *modifiers
, uint32_t count
)
1034 uint64_t *supported_modifiers
;
1035 int32_t supported_modifiers_count
;
1039 if (!draw
->ext
->image
->queryDmaBufModifiers(draw
->dri_screen
,
1040 format
, 0, NULL
, NULL
,
1041 &supported_modifiers_count
) ||
1042 supported_modifiers_count
== 0)
1045 supported_modifiers
= malloc(supported_modifiers_count
* sizeof(uint64_t));
1046 if (!supported_modifiers
)
1049 draw
->ext
->image
->queryDmaBufModifiers(draw
->dri_screen
, format
,
1050 supported_modifiers_count
,
1051 supported_modifiers
, NULL
,
1052 &supported_modifiers_count
);
1054 for (i
= 0; !found
&& i
< supported_modifiers_count
; i
++) {
1055 for (j
= 0; !found
&& j
< count
; j
++) {
1056 if (supported_modifiers
[i
] == modifiers
[j
])
1061 free(supported_modifiers
);
1065 /** loader_dri3_alloc_render_buffer
1067 * Use the driver createImage function to construct a __DRIimage, then
1068 * get a file descriptor for that and create an X pixmap from that
1070 * Allocate an xshmfence for synchronization
1072 static struct loader_dri3_buffer
*
1073 dri3_alloc_render_buffer(struct loader_dri3_drawable
*draw
, unsigned int format
,
1074 int width
, int height
, int depth
)
1076 struct loader_dri3_buffer
*buffer
;
1077 __DRIimage
*pixmap_buffer
;
1078 xcb_pixmap_t pixmap
;
1079 xcb_sync_fence_t sync_fence
;
1080 struct xshmfence
*shm_fence
;
1081 int buffer_fds
[4], fence_fd
;
1086 /* Create an xshmfence object and
1087 * prepare to send that to the X server
1090 fence_fd
= xshmfence_alloc_shm();
1094 shm_fence
= xshmfence_map_shm(fence_fd
);
1095 if (shm_fence
== NULL
)
1098 /* Allocate the image from the driver
1100 buffer
= calloc(1, sizeof *buffer
);
1104 buffer
->cpp
= dri3_cpp_for_format(format
);
1108 if (!draw
->is_different_gpu
) {
1109 if (draw
->multiplanes_available
&&
1110 draw
->ext
->image
->base
.version
>= 15 &&
1111 draw
->ext
->image
->queryDmaBufModifiers
&&
1112 draw
->ext
->image
->createImageWithModifiers
) {
1113 xcb_dri3_get_supported_modifiers_cookie_t mod_cookie
;
1114 xcb_dri3_get_supported_modifiers_reply_t
*mod_reply
;
1115 xcb_generic_error_t
*error
= NULL
;
1116 uint64_t *modifiers
= NULL
;
1119 mod_cookie
= xcb_dri3_get_supported_modifiers(draw
->conn
,
1121 depth
, buffer
->cpp
* 8);
1122 mod_reply
= xcb_dri3_get_supported_modifiers_reply(draw
->conn
,
1128 if (mod_reply
->num_window_modifiers
) {
1129 count
= mod_reply
->num_window_modifiers
;
1130 modifiers
= malloc(count
* sizeof(uint64_t));
1137 xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply
),
1138 count
* sizeof(uint64_t));
1140 if (!has_supported_modifier(draw
, image_format_to_fourcc(format
),
1141 modifiers
, count
)) {
1148 if (mod_reply
->num_screen_modifiers
&& modifiers
== NULL
) {
1149 count
= mod_reply
->num_screen_modifiers
;
1150 modifiers
= malloc(count
* sizeof(uint64_t));
1158 xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply
),
1159 count
* sizeof(uint64_t));
1164 buffer
->image
= draw
->ext
->image
->createImageWithModifiers(draw
->dri_screen
,
1174 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1177 __DRI_IMAGE_USE_SHARE
|
1178 __DRI_IMAGE_USE_SCANOUT
|
1179 __DRI_IMAGE_USE_BACKBUFFER
,
1182 pixmap_buffer
= buffer
->image
;
1187 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1196 buffer
->linear_buffer
=
1197 draw
->ext
->image
->createImage(draw
->dri_screen
,
1198 width
, height
, format
,
1199 __DRI_IMAGE_USE_SHARE
|
1200 __DRI_IMAGE_USE_LINEAR
|
1201 __DRI_IMAGE_USE_BACKBUFFER
,
1203 pixmap_buffer
= buffer
->linear_buffer
;
1205 if (!buffer
->linear_buffer
)
1206 goto no_linear_buffer
;
1209 /* X want some information about the planes, so ask the image for it
1211 if (!draw
->ext
->image
->queryImage(pixmap_buffer
, __DRI_IMAGE_ATTRIB_NUM_PLANES
,
1215 for (i
= 0; i
< num_planes
; i
++) {
1216 __DRIimage
*image
= draw
->ext
->image
->fromPlanar(pixmap_buffer
, i
, NULL
);
1220 image
= pixmap_buffer
;
1223 ret
= draw
->ext
->image
->queryImage(image
, __DRI_IMAGE_ATTRIB_FD
,
1225 ret
&= draw
->ext
->image
->queryImage(image
, __DRI_IMAGE_ATTRIB_STRIDE
,
1226 &buffer
->strides
[i
]);
1227 ret
&= draw
->ext
->image
->queryImage(image
, __DRI_IMAGE_ATTRIB_OFFSET
,
1228 &buffer
->offsets
[i
]);
1229 if (image
!= pixmap_buffer
)
1230 draw
->ext
->image
->destroyImage(image
);
1233 goto no_buffer_attrib
;
1236 ret
= draw
->ext
->image
->queryImage(pixmap_buffer
,
1237 __DRI_IMAGE_ATTRIB_MODIFIER_UPPER
, &mod
);
1238 buffer
->modifier
= (uint64_t) mod
<< 32;
1239 ret
&= draw
->ext
->image
->queryImage(pixmap_buffer
,
1240 __DRI_IMAGE_ATTRIB_MODIFIER_LOWER
, &mod
);
1241 buffer
->modifier
|= (uint64_t)(mod
& 0xffffffff);
1244 buffer
->modifier
= DRM_FORMAT_MOD_INVALID
;
1246 pixmap
= xcb_generate_id(draw
->conn
);
1247 if (draw
->multiplanes_available
&&
1248 buffer
->modifier
!= DRM_FORMAT_MOD_INVALID
) {
1249 xcb_dri3_pixmap_from_buffers(draw
->conn
,
1254 buffer
->strides
[0], buffer
->offsets
[0],
1255 buffer
->strides
[1], buffer
->offsets
[1],
1256 buffer
->strides
[2], buffer
->offsets
[2],
1257 buffer
->strides
[3], buffer
->offsets
[3],
1258 depth
, buffer
->cpp
* 8,
1262 xcb_dri3_pixmap_from_buffer(draw
->conn
,
1266 width
, height
, buffer
->strides
[0],
1267 depth
, buffer
->cpp
* 8,
1271 xcb_dri3_fence_from_fd(draw
->conn
,
1273 (sync_fence
= xcb_generate_id(draw
->conn
)),
1277 buffer
->pixmap
= pixmap
;
1278 buffer
->own_pixmap
= true;
1279 buffer
->sync_fence
= sync_fence
;
1280 buffer
->shm_fence
= shm_fence
;
1281 buffer
->width
= width
;
1282 buffer
->height
= height
;
1284 /* Mark the buffer as idle
1286 dri3_fence_set(buffer
);
1292 close(buffer_fds
[i
]);
1294 draw
->ext
->image
->destroyImage(pixmap_buffer
);
1296 if (draw
->is_different_gpu
)
1297 draw
->ext
->image
->destroyImage(buffer
->image
);
1301 xshmfence_unmap_shm(shm_fence
);
1307 /** loader_dri3_update_drawable
1309 * Called the first time we use the drawable and then
1310 * after we receive present configure notify events to
1311 * track the geometry of the drawable
1314 dri3_update_drawable(__DRIdrawable
*driDrawable
,
1315 struct loader_dri3_drawable
*draw
)
1317 mtx_lock(&draw
->mtx
);
1318 if (draw
->first_init
) {
1319 xcb_get_geometry_cookie_t geom_cookie
;
1320 xcb_get_geometry_reply_t
*geom_reply
;
1321 xcb_void_cookie_t cookie
;
1322 xcb_generic_error_t
*error
;
1323 xcb_present_query_capabilities_cookie_t present_capabilities_cookie
;
1324 xcb_present_query_capabilities_reply_t
*present_capabilities_reply
;
1326 draw
->first_init
= false;
1328 /* Try to select for input on the window.
1330 * If the drawable is a window, this will get our events
1333 * Otherwise, we'll get a BadWindow error back from this request which
1334 * will let us know that the drawable is a pixmap instead.
1337 draw
->eid
= xcb_generate_id(draw
->conn
);
1339 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
1340 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY
|
1341 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY
|
1342 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY
);
1344 present_capabilities_cookie
=
1345 xcb_present_query_capabilities(draw
->conn
, draw
->drawable
);
1347 /* Create an XCB event queue to hold present events outside of the usual
1348 * application event queue
1350 draw
->special_event
= xcb_register_for_special_xge(draw
->conn
,
1354 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1356 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1359 mtx_unlock(&draw
->mtx
);
1363 draw
->width
= geom_reply
->width
;
1364 draw
->height
= geom_reply
->height
;
1365 draw
->depth
= geom_reply
->depth
;
1366 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
1370 draw
->is_pixmap
= false;
1372 /* Check to see if our select input call failed. If it failed with a
1373 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1374 * special event queue created above and mark the drawable as a pixmap
1377 error
= xcb_request_check(draw
->conn
, cookie
);
1379 present_capabilities_reply
=
1380 xcb_present_query_capabilities_reply(draw
->conn
,
1381 present_capabilities_cookie
,
1384 if (present_capabilities_reply
) {
1385 draw
->present_capabilities
= present_capabilities_reply
->capabilities
;
1386 free(present_capabilities_reply
);
1388 draw
->present_capabilities
= 0;
1391 if (error
->error_code
!= BadWindow
) {
1393 mtx_unlock(&draw
->mtx
);
1396 draw
->is_pixmap
= true;
1397 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
1398 draw
->special_event
= NULL
;
1401 dri3_flush_present_events(draw
);
1402 mtx_unlock(&draw
->mtx
);
1407 loader_dri3_create_image(xcb_connection_t
*c
,
1408 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
,
1409 unsigned int format
,
1410 __DRIscreen
*dri_screen
,
1411 const __DRIimageExtension
*image
,
1412 void *loaderPrivate
)
1415 __DRIimage
*image_planar
, *ret
;
1418 /* Get an FD for the pixmap object
1420 fds
= xcb_dri3_buffer_from_pixmap_reply_fds(c
, bp_reply
);
1422 stride
= bp_reply
->stride
;
1425 /* createImageFromFds creates a wrapper __DRIimage structure which
1426 * can deal with multiple planes for things like Yuv images. So, once
1427 * we've gotten the planar wrapper, pull the single plane out of it and
1428 * discard the wrapper.
1430 image_planar
= image
->createImageFromFds(dri_screen
,
1433 image_format_to_fourcc(format
),
1435 &stride
, &offset
, loaderPrivate
);
1440 ret
= image
->fromPlanar(image_planar
, 0, loaderPrivate
);
1445 image
->destroyImage(image_planar
);
1451 loader_dri3_create_image_from_buffers(xcb_connection_t
*c
,
1452 xcb_dri3_buffers_from_pixmap_reply_t
*bp_reply
,
1453 unsigned int format
,
1454 __DRIscreen
*dri_screen
,
1455 const __DRIimageExtension
*image
,
1456 void *loaderPrivate
)
1460 uint32_t *strides_in
, *offsets_in
;
1461 int strides
[4], offsets
[4];
1465 if (bp_reply
->nfd
> 4)
1468 fds
= xcb_dri3_buffers_from_pixmap_reply_fds(c
, bp_reply
);
1469 strides_in
= xcb_dri3_buffers_from_pixmap_strides(bp_reply
);
1470 offsets_in
= xcb_dri3_buffers_from_pixmap_offsets(bp_reply
);
1471 for (i
= 0; i
< bp_reply
->nfd
; i
++) {
1472 strides
[i
] = strides_in
[i
];
1473 offsets
[i
] = offsets_in
[i
];
1476 ret
= image
->createImageFromDmaBufs2(dri_screen
,
1479 image_format_to_fourcc(format
),
1483 0, 0, 0, 0, /* UNDEFINED */
1484 &error
, loaderPrivate
);
1486 for (i
= 0; i
< bp_reply
->nfd
; i
++)
1492 /** dri3_get_pixmap_buffer
1494 * Get the DRM object for a pixmap from the X server and
1495 * wrap that with a __DRIimage structure using createImageFromFds
1497 static struct loader_dri3_buffer
*
1498 dri3_get_pixmap_buffer(__DRIdrawable
*driDrawable
, unsigned int format
,
1499 enum loader_dri3_buffer_type buffer_type
,
1500 struct loader_dri3_drawable
*draw
)
1502 int buf_id
= loader_dri3_pixmap_buf_id(buffer_type
);
1503 struct loader_dri3_buffer
*buffer
= draw
->buffers
[buf_id
];
1504 xcb_drawable_t pixmap
;
1505 xcb_sync_fence_t sync_fence
;
1506 struct xshmfence
*shm_fence
;
1510 __DRIscreen
*cur_screen
;
1515 pixmap
= draw
->drawable
;
1517 buffer
= calloc(1, sizeof *buffer
);
1521 fence_fd
= xshmfence_alloc_shm();
1524 shm_fence
= xshmfence_map_shm(fence_fd
);
1525 if (shm_fence
== NULL
) {
1530 /* Get the currently-bound screen or revert to using the drawable's screen if
1531 * no contexts are currently bound. The latter case is at least necessary for
1532 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1534 cur_screen
= draw
->vtable
->get_dri_screen();
1536 cur_screen
= draw
->dri_screen
;
1539 xcb_dri3_fence_from_fd(draw
->conn
,
1541 (sync_fence
= xcb_generate_id(draw
->conn
)),
1545 if (draw
->multiplanes_available
&&
1546 draw
->ext
->image
->base
.version
>= 15 &&
1547 draw
->ext
->image
->createImageFromDmaBufs2
) {
1548 xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie
;
1549 xcb_dri3_buffers_from_pixmap_reply_t
*bps_reply
;
1551 bps_cookie
= xcb_dri3_buffers_from_pixmap(draw
->conn
, pixmap
);
1552 bps_reply
= xcb_dri3_buffers_from_pixmap_reply(draw
->conn
, bps_cookie
,
1557 loader_dri3_create_image_from_buffers(draw
->conn
, bps_reply
, format
,
1558 cur_screen
, draw
->ext
->image
,
1560 width
= bps_reply
->width
;
1561 height
= bps_reply
->height
;
1564 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie
;
1565 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
;
1567 bp_cookie
= xcb_dri3_buffer_from_pixmap(draw
->conn
, pixmap
);
1568 bp_reply
= xcb_dri3_buffer_from_pixmap_reply(draw
->conn
, bp_cookie
, NULL
);
1572 buffer
->image
= loader_dri3_create_image(draw
->conn
, bp_reply
, format
,
1573 cur_screen
, draw
->ext
->image
,
1575 width
= bp_reply
->width
;
1576 height
= bp_reply
->height
;
1583 buffer
->pixmap
= pixmap
;
1584 buffer
->own_pixmap
= false;
1585 buffer
->width
= width
;
1586 buffer
->height
= height
;
1587 buffer
->shm_fence
= shm_fence
;
1588 buffer
->sync_fence
= sync_fence
;
1590 draw
->buffers
[buf_id
] = buffer
;
1595 xcb_sync_destroy_fence(draw
->conn
, sync_fence
);
1596 xshmfence_unmap_shm(shm_fence
);
1605 * Find a front or back buffer, allocating new ones as necessary
1607 static struct loader_dri3_buffer
*
1608 dri3_get_buffer(__DRIdrawable
*driDrawable
,
1609 unsigned int format
,
1610 enum loader_dri3_buffer_type buffer_type
,
1611 struct loader_dri3_drawable
*draw
)
1613 struct loader_dri3_buffer
*buffer
;
1616 if (buffer_type
== loader_dri3_buffer_back
) {
1617 draw
->back_format
= format
;
1619 buf_id
= dri3_find_back(draw
);
1624 buf_id
= LOADER_DRI3_FRONT_ID
;
1627 buffer
= draw
->buffers
[buf_id
];
1629 /* Allocate a new buffer if there isn't an old one, or if that
1630 * old one is the wrong size
1632 if (!buffer
|| buffer
->width
!= draw
->width
||
1633 buffer
->height
!= draw
->height
) {
1634 struct loader_dri3_buffer
*new_buffer
;
1636 /* Allocate the new buffers
1638 new_buffer
= dri3_alloc_render_buffer(draw
,
1646 /* When resizing, copy the contents of the old buffer, waiting for that
1647 * copy to complete using our fences before proceeding
1649 if ((buffer_type
== loader_dri3_buffer_back
||
1650 (buffer_type
== loader_dri3_buffer_front
&& draw
->have_fake_front
))
1653 /* Fill the new buffer with data from an old buffer */
1654 dri3_fence_await(draw
->conn
, draw
, buffer
);
1655 if (!loader_dri3_blit_image(draw
,
1658 0, 0, draw
->width
, draw
->height
,
1660 !buffer
->linear_buffer
) {
1661 dri3_fence_reset(draw
->conn
, new_buffer
);
1662 dri3_copy_area(draw
->conn
,
1665 dri3_drawable_gc(draw
),
1667 draw
->width
, draw
->height
);
1668 dri3_fence_trigger(draw
->conn
, new_buffer
);
1670 dri3_free_render_buffer(draw
, buffer
);
1671 } else if (buffer_type
== loader_dri3_buffer_front
) {
1672 /* Fill the new fake front with data from a real front */
1673 loader_dri3_swapbuffer_barrier(draw
);
1674 dri3_fence_reset(draw
->conn
, new_buffer
);
1675 dri3_copy_area(draw
->conn
,
1678 dri3_drawable_gc(draw
),
1680 draw
->width
, draw
->height
);
1681 dri3_fence_trigger(draw
->conn
, new_buffer
);
1683 if (new_buffer
->linear_buffer
) {
1684 dri3_fence_await(draw
->conn
, draw
, new_buffer
);
1685 (void) loader_dri3_blit_image(draw
,
1687 new_buffer
->linear_buffer
,
1688 0, 0, draw
->width
, draw
->height
,
1692 buffer
= new_buffer
;
1693 draw
->buffers
[buf_id
] = buffer
;
1695 dri3_fence_await(draw
->conn
, draw
, buffer
);
1698 * Do we need to preserve the content of a previous buffer?
1700 * Note that this blit is needed only to avoid a wait for a buffer that
1701 * is currently in the flip chain or being scanned out from. That's really
1702 * a tradeoff. If we're ok with the wait we can reduce the number of back
1703 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1704 * but in the latter case we must disallow page-flipping.
1706 if (buffer_type
== loader_dri3_buffer_back
&&
1707 draw
->cur_blit_source
!= -1 &&
1708 draw
->buffers
[draw
->cur_blit_source
] &&
1709 buffer
!= draw
->buffers
[draw
->cur_blit_source
]) {
1711 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
1713 /* Avoid flushing here. Will propably do good for tiling hardware. */
1714 (void) loader_dri3_blit_image(draw
,
1717 0, 0, draw
->width
, draw
->height
,
1719 buffer
->last_swap
= source
->last_swap
;
1720 draw
->cur_blit_source
= -1;
1722 /* Return the requested buffer */
1726 /** dri3_free_buffers
1728 * Free the front bufffer or all of the back buffers. Used
1729 * when the application changes which buffers it needs
1732 dri3_free_buffers(__DRIdrawable
*driDrawable
,
1733 enum loader_dri3_buffer_type buffer_type
,
1734 struct loader_dri3_drawable
*draw
)
1736 struct loader_dri3_buffer
*buffer
;
1741 switch (buffer_type
) {
1742 case loader_dri3_buffer_back
:
1743 first_id
= LOADER_DRI3_BACK_ID(0);
1744 n_id
= LOADER_DRI3_MAX_BACK
;
1745 draw
->cur_blit_source
= -1;
1747 case loader_dri3_buffer_front
:
1748 first_id
= LOADER_DRI3_FRONT_ID
;
1749 /* Don't free a fake front holding new backbuffer content. */
1750 n_id
= (draw
->cur_blit_source
== LOADER_DRI3_FRONT_ID
) ? 0 : 1;
1753 for (buf_id
= first_id
; buf_id
< first_id
+ n_id
; buf_id
++) {
1754 buffer
= draw
->buffers
[buf_id
];
1756 dri3_free_render_buffer(draw
, buffer
);
1757 draw
->buffers
[buf_id
] = NULL
;
1762 /** loader_dri3_get_buffers
1764 * The published buffer allocation API.
1765 * Returns all of the necessary buffers, allocating
1769 loader_dri3_get_buffers(__DRIdrawable
*driDrawable
,
1770 unsigned int format
,
1772 void *loaderPrivate
,
1773 uint32_t buffer_mask
,
1774 struct __DRIimageList
*buffers
)
1776 struct loader_dri3_drawable
*draw
= loaderPrivate
;
1777 struct loader_dri3_buffer
*front
, *back
;
1779 buffers
->image_mask
= 0;
1780 buffers
->front
= NULL
;
1781 buffers
->back
= NULL
;
1786 if (!dri3_update_drawable(driDrawable
, draw
))
1789 /* pixmaps always have front buffers.
1790 * Exchange swaps also mandate fake front buffers.
1792 if (draw
->is_pixmap
|| draw
->swap_method
== __DRI_ATTRIB_SWAP_EXCHANGE
)
1793 buffer_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
1795 if (buffer_mask
& __DRI_IMAGE_BUFFER_FRONT
) {
1796 /* All pixmaps are owned by the server gpu.
1797 * When we use a different gpu, we can't use the pixmap
1798 * as buffer since it is potentially tiled a way
1799 * our device can't understand. In this case, use
1800 * a fake front buffer. Hopefully the pixmap
1801 * content will get synced with the fake front
1804 if (draw
->is_pixmap
&& !draw
->is_different_gpu
)
1805 front
= dri3_get_pixmap_buffer(driDrawable
,
1807 loader_dri3_buffer_front
,
1810 front
= dri3_get_buffer(driDrawable
,
1812 loader_dri3_buffer_front
,
1818 dri3_free_buffers(driDrawable
, loader_dri3_buffer_front
, draw
);
1819 draw
->have_fake_front
= 0;
1822 if (buffer_mask
& __DRI_IMAGE_BUFFER_BACK
) {
1823 back
= dri3_get_buffer(driDrawable
,
1825 loader_dri3_buffer_back
,
1829 draw
->have_back
= 1;
1831 dri3_free_buffers(driDrawable
, loader_dri3_buffer_back
, draw
);
1832 draw
->have_back
= 0;
1836 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
1837 buffers
->front
= front
->image
;
1838 draw
->have_fake_front
= draw
->is_different_gpu
|| !draw
->is_pixmap
;
1842 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_BACK
;
1843 buffers
->back
= back
->image
;
1846 draw
->stamp
= stamp
;
1851 /** loader_dri3_update_drawable_geometry
1853 * Get the current drawable geometry.
1856 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable
*draw
)
1858 xcb_get_geometry_cookie_t geom_cookie
;
1859 xcb_get_geometry_reply_t
*geom_reply
;
1861 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1863 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1866 draw
->width
= geom_reply
->width
;
1867 draw
->height
= geom_reply
->height
;
1868 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
1869 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t ust, msc, sbc;

   /* Waiting for SBC 0 means "wait until all outstanding swaps complete";
    * the returned counters are not needed here.
    */
   (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
}
1891 * Perform any cleanup associated with a close screen operation.
1892 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1894 * This function destroys the screen's cached swap context if any.
1897 loader_dri3_close_screen(__DRIscreen
*dri_screen
)
1899 mtx_lock(&blit_context
.mtx
);
1900 if (blit_context
.ctx
&& blit_context
.cur_screen
== dri_screen
) {
1901 blit_context
.core
->destroyContext(blit_context
.ctx
);
1902 blit_context
.ctx
= NULL
;
1904 mtx_unlock(&blit_context
.mtx
);
1908 * Find a backbuffer slot - potentially allocating a back buffer
1910 * \param draw[in,out] Pointer to the drawable for which to find back.
1911 * \return Pointer to a new back buffer or NULL if allocation failed or was
1914 * Find a potentially new back buffer, and if it's not been allocated yet and
1915 * in addition needs initializing, then try to allocate and initialize it.
1918 static struct loader_dri3_buffer
*
1919 dri3_find_back_alloc(struct loader_dri3_drawable
*draw
)
1921 struct loader_dri3_buffer
*back
;
1924 id
= dri3_find_back(draw
);
1928 back
= draw
->buffers
[id
];
1929 /* Allocate a new back if we haven't got one */
1930 if (!back
&& draw
->back_format
!= __DRI_IMAGE_FORMAT_NONE
&&
1931 dri3_update_drawable(draw
->dri_drawable
, draw
))
1932 back
= dri3_alloc_render_buffer(draw
, draw
->back_format
,
1933 draw
->width
, draw
->height
, draw
->depth
);
1938 draw
->buffers
[id
] = back
;
1940 /* If necessary, prefill the back with data according to swap_method mode. */
1941 if (draw
->cur_blit_source
!= -1 &&
1942 draw
->buffers
[draw
->cur_blit_source
] &&
1943 back
!= draw
->buffers
[draw
->cur_blit_source
]) {
1944 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
1946 dri3_fence_await(draw
->conn
, draw
, source
);
1947 dri3_fence_await(draw
->conn
, draw
, back
);
1948 (void) loader_dri3_blit_image(draw
,
1951 0, 0, draw
->width
, draw
->height
,
1953 back
->last_swap
= source
->last_swap
;
1954 draw
->cur_blit_source
= -1;