2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
29 #include <X11/xshmfence.h>
32 #include <xcb/present.h>
33 #include <xcb/xfixes.h>
35 #include <X11/Xlib-xcb.h>
37 #include "loader_dri3_helper.h"
38 #include "util/macros.h"
39 #include "drm-uapi/drm_fourcc.h"
41 /* From driconf.h, user exposed so should be stable */
/* vblank_mode values: 0 = never sync, 1 = app default of interval 0,
 * 2 = app default of interval 1, 3 = always sync to vblank.
 */
42 #define DRI_CONF_VBLANK_NEVER 0
43 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
44 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
45 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
48 * A cached blit context.
50 struct loader_dri3_blit_context
{
53 __DRIscreen
*cur_screen
;
54 const __DRIcoreExtension
*core
;
57 /* For simplicity we maintain the cache only for a single screen at a time */
58 static struct loader_dri3_blit_context blit_context
= {
59 _MTX_INITIALIZER_NP
, NULL
/* Forward declarations for helpers used before their definitions. */
static void
dri3_flush_present_events(struct loader_dri3_drawable *draw);

static struct loader_dri3_buffer *
dri3_find_back_alloc(struct loader_dri3_drawable *draw);
69 get_screen_for_root(xcb_connection_t
*conn
, xcb_window_t root
)
71 xcb_screen_iterator_t screen_iter
=
72 xcb_setup_roots_iterator(xcb_get_setup(conn
));
74 for (; screen_iter
.rem
; xcb_screen_next (&screen_iter
)) {
75 if (screen_iter
.data
->root
== root
)
76 return screen_iter
.data
;
82 static xcb_visualtype_t
*
83 get_xcb_visualtype_for_depth(struct loader_dri3_drawable
*draw
, int depth
)
85 xcb_visualtype_iterator_t visual_iter
;
86 xcb_screen_t
*screen
= draw
->screen
;
87 xcb_depth_iterator_t depth_iter
;
92 depth_iter
= xcb_screen_allowed_depths_iterator(screen
);
93 for (; depth_iter
.rem
; xcb_depth_next(&depth_iter
)) {
94 if (depth_iter
.data
->depth
!= depth
)
97 visual_iter
= xcb_depth_visuals_iterator(depth_iter
.data
);
99 return visual_iter
.data
;
105 /* Sets the adaptive sync window property state. */
107 set_adaptive_sync_property(xcb_connection_t
*conn
, xcb_drawable_t drawable
,
110 static char const name
[] = "_VARIABLE_REFRESH";
111 xcb_intern_atom_cookie_t cookie
;
112 xcb_intern_atom_reply_t
* reply
;
113 xcb_void_cookie_t check
;
115 cookie
= xcb_intern_atom(conn
, 0, strlen(name
), name
);
116 reply
= xcb_intern_atom_reply(conn
, cookie
, NULL
);
121 check
= xcb_change_property_checked(conn
, XCB_PROP_MODE_REPLACE
,
122 drawable
, reply
->atom
,
123 XCB_ATOM_CARDINAL
, 32, 1, &state
);
125 check
= xcb_delete_property_checked(conn
, drawable
, reply
->atom
);
127 xcb_discard_reply(conn
, check
.sequence
);
131 /* Get red channel mask for given drawable at given depth. */
133 dri3_get_red_mask_for_depth(struct loader_dri3_drawable
*draw
, int depth
)
135 xcb_visualtype_t
*visual
= get_xcb_visualtype_for_depth(draw
, depth
);
138 return visual
->red_mask
;
144 * Do we have blit functionality in the image blit extension?
146 * \param draw[in] The drawable intended to blit from / to.
147 * \return true if we have blit functionality. false otherwise.
149 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable
*draw
)
151 return draw
->ext
->image
->base
.version
>= 9 &&
152 draw
->ext
->image
->blitImage
!= NULL
;
156 * Get and lock (for use with the current thread) a dri context associated
157 * with the drawable's dri screen. The context is intended to be used with
158 * the dri image extension's blitImage method.
160 * \param draw[in] Pointer to the drawable whose dri screen we want a
162 * \return A dri context or NULL if context creation failed.
164 * When the caller is done with the context (even if the context returned was
165 * NULL), the caller must call loader_dri3_blit_context_put.
167 static __DRIcontext
*
168 loader_dri3_blit_context_get(struct loader_dri3_drawable
*draw
)
170 mtx_lock(&blit_context
.mtx
);
172 if (blit_context
.ctx
&& blit_context
.cur_screen
!= draw
->dri_screen
) {
173 blit_context
.core
->destroyContext(blit_context
.ctx
);
174 blit_context
.ctx
= NULL
;
177 if (!blit_context
.ctx
) {
178 blit_context
.ctx
= draw
->ext
->core
->createNewContext(draw
->dri_screen
,
180 blit_context
.cur_screen
= draw
->dri_screen
;
181 blit_context
.core
= draw
->ext
->core
;
184 return blit_context
.ctx
;
188 * Release (for use with other threads) a dri context previously obtained using
189 * loader_dri3_blit_context_get.
192 loader_dri3_blit_context_put(void)
194 mtx_unlock(&blit_context
.mtx
);
198 * Blit (parts of) the contents of a DRI image to another dri image
200 * \param draw[in] The drawable which owns the images.
201 * \param dst[in] The destination image.
202 * \param src[in] The source image.
203 * \param dstx0[in] Start destination coordinate.
204 * \param dsty0[in] Start destination coordinate.
205 * \param width[in] Blit width.
206 * \param height[in] Blit height.
207 * \param srcx0[in] Start source coordinate.
208 * \param srcy0[in] Start source coordinate.
209 * \param flush_flag[in] Image blit flush flag.
210 * \return true iff successful.
213 loader_dri3_blit_image(struct loader_dri3_drawable
*draw
,
214 __DRIimage
*dst
, __DRIimage
*src
,
215 int dstx0
, int dsty0
, int width
, int height
,
216 int srcx0
, int srcy0
, int flush_flag
)
218 __DRIcontext
*dri_context
;
219 bool use_blit_context
= false;
221 if (!loader_dri3_have_image_blit(draw
))
224 dri_context
= draw
->vtable
->get_dri_context(draw
);
226 if (!dri_context
|| !draw
->vtable
->in_current_context(draw
)) {
227 dri_context
= loader_dri3_blit_context_get(draw
);
228 use_blit_context
= true;
229 flush_flag
|= __BLIT_FLAG_FLUSH
;
233 draw
->ext
->image
->blitImage(dri_context
, dst
, src
, dstx0
, dsty0
,
234 width
, height
, srcx0
, srcy0
,
235 width
, height
, flush_flag
);
237 if (use_blit_context
)
238 loader_dri3_blit_context_put();
240 return dri_context
!= NULL
;
244 dri3_fence_reset(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
246 xshmfence_reset(buffer
->shm_fence
);
250 dri3_fence_set(struct loader_dri3_buffer
*buffer
)
252 xshmfence_trigger(buffer
->shm_fence
);
256 dri3_fence_trigger(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
258 xcb_sync_trigger_fence(c
, buffer
->sync_fence
);
262 dri3_fence_await(xcb_connection_t
*c
, struct loader_dri3_drawable
*draw
,
263 struct loader_dri3_buffer
*buffer
)
266 xshmfence_await(buffer
->shm_fence
);
268 mtx_lock(&draw
->mtx
);
269 dri3_flush_present_events(draw
);
270 mtx_unlock(&draw
->mtx
);
275 dri3_update_num_back(struct loader_dri3_drawable
*draw
)
277 if (draw
->last_present_mode
== XCB_PRESENT_COMPLETE_MODE_FLIP
)
284 loader_dri3_set_swap_interval(struct loader_dri3_drawable
*draw
, int interval
)
286 draw
->swap_interval
= interval
;
289 /** dri3_free_render_buffer
291 * Free everything associated with one render buffer including pixmap, fence
292 * stuff and the driver image
295 dri3_free_render_buffer(struct loader_dri3_drawable
*draw
,
296 struct loader_dri3_buffer
*buffer
)
298 if (buffer
->own_pixmap
)
299 xcb_free_pixmap(draw
->conn
, buffer
->pixmap
);
300 xcb_sync_destroy_fence(draw
->conn
, buffer
->sync_fence
);
301 xshmfence_unmap_shm(buffer
->shm_fence
);
302 draw
->ext
->image
->destroyImage(buffer
->image
);
303 if (buffer
->linear_buffer
)
304 draw
->ext
->image
->destroyImage(buffer
->linear_buffer
);
309 loader_dri3_drawable_fini(struct loader_dri3_drawable
*draw
)
313 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
315 for (i
= 0; i
< ARRAY_SIZE(draw
->buffers
); i
++) {
316 if (draw
->buffers
[i
])
317 dri3_free_render_buffer(draw
, draw
->buffers
[i
]);
320 if (draw
->special_event
) {
321 xcb_void_cookie_t cookie
=
322 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
323 XCB_PRESENT_EVENT_MASK_NO_EVENT
);
325 xcb_discard_reply(draw
->conn
, cookie
.sequence
);
326 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
329 cnd_destroy(&draw
->event_cnd
);
330 mtx_destroy(&draw
->mtx
);
334 loader_dri3_drawable_init(xcb_connection_t
*conn
,
335 xcb_drawable_t drawable
,
336 __DRIscreen
*dri_screen
,
337 bool is_different_gpu
,
338 bool multiplanes_available
,
339 const __DRIconfig
*dri_config
,
340 struct loader_dri3_extensions
*ext
,
341 const struct loader_dri3_vtable
*vtable
,
342 struct loader_dri3_drawable
*draw
)
344 xcb_get_geometry_cookie_t cookie
;
345 xcb_get_geometry_reply_t
*reply
;
346 xcb_generic_error_t
*error
;
347 GLint vblank_mode
= DRI_CONF_VBLANK_DEF_INTERVAL_1
;
352 draw
->vtable
= vtable
;
353 draw
->drawable
= drawable
;
354 draw
->dri_screen
= dri_screen
;
355 draw
->is_different_gpu
= is_different_gpu
;
356 draw
->multiplanes_available
= multiplanes_available
;
359 draw
->have_fake_front
= 0;
360 draw
->first_init
= true;
361 draw
->adaptive_sync
= false;
362 draw
->adaptive_sync_active
= false;
364 draw
->cur_blit_source
= -1;
365 draw
->back_format
= __DRI_IMAGE_FORMAT_NONE
;
366 mtx_init(&draw
->mtx
, mtx_plain
);
367 cnd_init(&draw
->event_cnd
);
369 if (draw
->ext
->config
) {
370 unsigned char adaptive_sync
= 0;
372 draw
->ext
->config
->configQueryi(draw
->dri_screen
,
373 "vblank_mode", &vblank_mode
);
375 draw
->ext
->config
->configQueryb(draw
->dri_screen
,
379 draw
->adaptive_sync
= adaptive_sync
;
382 if (!draw
->adaptive_sync
)
383 set_adaptive_sync_property(conn
, draw
->drawable
, false);
385 switch (vblank_mode
) {
386 case DRI_CONF_VBLANK_NEVER
:
387 case DRI_CONF_VBLANK_DEF_INTERVAL_0
:
390 case DRI_CONF_VBLANK_DEF_INTERVAL_1
:
391 case DRI_CONF_VBLANK_ALWAYS_SYNC
:
396 draw
->swap_interval
= swap_interval
;
398 dri3_update_num_back(draw
);
400 /* Create a new drawable */
402 draw
->ext
->image_driver
->createNewDrawable(dri_screen
,
406 if (!draw
->dri_drawable
)
409 cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
410 reply
= xcb_get_geometry_reply(draw
->conn
, cookie
, &error
);
411 if (reply
== NULL
|| error
!= NULL
) {
412 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
416 draw
->screen
= get_screen_for_root(draw
->conn
, reply
->root
);
417 draw
->width
= reply
->width
;
418 draw
->height
= reply
->height
;
419 draw
->depth
= reply
->depth
;
420 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
423 draw
->swap_method
= __DRI_ATTRIB_SWAP_UNDEFINED
;
424 if (draw
->ext
->core
->base
.version
>= 2) {
425 (void )draw
->ext
->core
->getConfigAttrib(dri_config
,
426 __DRI_ATTRIB_SWAP_METHOD
,
431 * Make sure server has the same swap interval we do for the new
434 loader_dri3_set_swap_interval(draw
, swap_interval
);
440 * Process one Present event
443 dri3_handle_present_event(struct loader_dri3_drawable
*draw
,
444 xcb_present_generic_event_t
*ge
)
446 switch (ge
->evtype
) {
447 case XCB_PRESENT_CONFIGURE_NOTIFY
: {
448 xcb_present_configure_notify_event_t
*ce
= (void *) ge
;
450 draw
->width
= ce
->width
;
451 draw
->height
= ce
->height
;
452 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
453 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
456 case XCB_PRESENT_COMPLETE_NOTIFY
: {
457 xcb_present_complete_notify_event_t
*ce
= (void *) ge
;
459 /* Compute the processed SBC number from the received 32-bit serial number
460 * merged with the upper 32-bits of the sent 64-bit serial number while
463 if (ce
->kind
== XCB_PRESENT_COMPLETE_KIND_PIXMAP
) {
464 uint64_t recv_sbc
= (draw
->send_sbc
& 0xffffffff00000000LL
) | ce
->serial
;
466 /* Only assume wraparound if that results in exactly the previous
467 * SBC + 1, otherwise ignore received SBC > sent SBC (those are
468 * probably from a previous loader_dri3_drawable instance) to avoid
469 * calculating bogus target MSC values in loader_dri3_swap_buffers_msc
471 if (recv_sbc
<= draw
->send_sbc
)
472 draw
->recv_sbc
= recv_sbc
;
473 else if (recv_sbc
== (draw
->recv_sbc
+ 0x100000001ULL
))
474 draw
->recv_sbc
= recv_sbc
- 0x100000000ULL
;
476 /* When moving from flip to copy, we assume that we can allocate in
477 * a more optimal way if we don't need to cater for the display
480 if (ce
->mode
== XCB_PRESENT_COMPLETE_MODE_COPY
&&
481 draw
->last_present_mode
== XCB_PRESENT_COMPLETE_MODE_FLIP
) {
482 for (int b
= 0; b
< ARRAY_SIZE(draw
->buffers
); b
++) {
483 if (draw
->buffers
[b
])
484 draw
->buffers
[b
]->reallocate
= true;
488 /* If the server tells us that our allocation is suboptimal, we
491 #ifdef HAVE_DRI3_MODIFIERS
492 if (ce
->mode
== XCB_PRESENT_COMPLETE_MODE_SUBOPTIMAL_COPY
&&
493 draw
->last_present_mode
!= ce
->mode
) {
494 for (int b
= 0; b
< ARRAY_SIZE(draw
->buffers
); b
++) {
495 if (draw
->buffers
[b
])
496 draw
->buffers
[b
]->reallocate
= true;
500 draw
->last_present_mode
= ce
->mode
;
502 if (draw
->vtable
->show_fps
)
503 draw
->vtable
->show_fps(draw
, ce
->ust
);
507 } else if (ce
->serial
== draw
->eid
) {
508 draw
->notify_ust
= ce
->ust
;
509 draw
->notify_msc
= ce
->msc
;
513 case XCB_PRESENT_EVENT_IDLE_NOTIFY
: {
514 xcb_present_idle_notify_event_t
*ie
= (void *) ge
;
517 for (b
= 0; b
< ARRAY_SIZE(draw
->buffers
); b
++) {
518 struct loader_dri3_buffer
*buf
= draw
->buffers
[b
];
520 if (buf
&& buf
->pixmap
== ie
->pixmap
)
530 dri3_wait_for_event_locked(struct loader_dri3_drawable
*draw
,
531 unsigned *full_sequence
)
533 xcb_generic_event_t
*ev
;
534 xcb_present_generic_event_t
*ge
;
536 xcb_flush(draw
->conn
);
538 /* Only have one thread waiting for events at a time */
539 if (draw
->has_event_waiter
) {
540 cnd_wait(&draw
->event_cnd
, &draw
->mtx
);
542 *full_sequence
= draw
->last_special_event_sequence
;
543 /* Another thread has updated the protected info, so retest. */
546 draw
->has_event_waiter
= true;
547 /* Allow other threads access to the drawable while we're waiting. */
548 mtx_unlock(&draw
->mtx
);
549 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
550 mtx_lock(&draw
->mtx
);
551 draw
->has_event_waiter
= false;
552 cnd_broadcast(&draw
->event_cnd
);
556 draw
->last_special_event_sequence
= ev
->full_sequence
;
558 *full_sequence
= ev
->full_sequence
;
560 dri3_handle_present_event(draw
, ge
);
564 /** loader_dri3_wait_for_msc
566 * Get the X server to send an event when the target msc/divisor/remainder is
570 loader_dri3_wait_for_msc(struct loader_dri3_drawable
*draw
,
572 int64_t divisor
, int64_t remainder
,
573 int64_t *ust
, int64_t *msc
, int64_t *sbc
)
575 xcb_void_cookie_t cookie
= xcb_present_notify_msc(draw
->conn
,
581 unsigned full_sequence
;
583 mtx_lock(&draw
->mtx
);
585 /* Wait for the event */
587 if (!dri3_wait_for_event_locked(draw
, &full_sequence
)) {
588 mtx_unlock(&draw
->mtx
);
591 } while (full_sequence
!= cookie
.sequence
|| draw
->notify_msc
< target_msc
);
593 *ust
= draw
->notify_ust
;
594 *msc
= draw
->notify_msc
;
595 *sbc
= draw
->recv_sbc
;
596 mtx_unlock(&draw
->mtx
);
601 /** loader_dri3_wait_for_sbc
603 * Wait for the completed swap buffer count to reach the specified
604 * target. Presumably the application knows that this will be reached with
605 * outstanding complete events, or we're going to be here awhile.
608 loader_dri3_wait_for_sbc(struct loader_dri3_drawable
*draw
,
609 int64_t target_sbc
, int64_t *ust
,
610 int64_t *msc
, int64_t *sbc
)
612 /* From the GLX_OML_sync_control spec:
614 * "If <target_sbc> = 0, the function will block until all previous
615 * swaps requested with glXSwapBuffersMscOML for that window have
618 mtx_lock(&draw
->mtx
);
620 target_sbc
= draw
->send_sbc
;
622 while (draw
->recv_sbc
< target_sbc
) {
623 if (!dri3_wait_for_event_locked(draw
, NULL
)) {
624 mtx_unlock(&draw
->mtx
);
631 *sbc
= draw
->recv_sbc
;
632 mtx_unlock(&draw
->mtx
);
636 /** loader_dri3_find_back
638 * Find an idle back buffer. If there isn't one, then
639 * wait for a present idle notify event from the X server
642 dri3_find_back(struct loader_dri3_drawable
*draw
)
647 mtx_lock(&draw
->mtx
);
648 /* Increase the likelyhood of reusing current buffer */
649 dri3_flush_present_events(draw
);
651 /* Check whether we need to reuse the current back buffer as new back.
652 * In that case, wait until it's not busy anymore.
654 num_to_consider
= draw
->num_back
;
655 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1) {
657 draw
->cur_blit_source
= -1;
661 for (b
= 0; b
< num_to_consider
; b
++) {
662 int id
= LOADER_DRI3_BACK_ID((b
+ draw
->cur_back
) % draw
->num_back
);
663 struct loader_dri3_buffer
*buffer
= draw
->buffers
[id
];
665 if (!buffer
|| !buffer
->busy
) {
667 mtx_unlock(&draw
->mtx
);
671 if (!dri3_wait_for_event_locked(draw
, NULL
)) {
672 mtx_unlock(&draw
->mtx
);
678 static xcb_gcontext_t
679 dri3_drawable_gc(struct loader_dri3_drawable
*draw
)
683 xcb_create_gc(draw
->conn
,
684 (draw
->gc
= xcb_generate_id(draw
->conn
)),
686 XCB_GC_GRAPHICS_EXPOSURES
,
693 static struct loader_dri3_buffer
*
694 dri3_back_buffer(struct loader_dri3_drawable
*draw
)
696 return draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)];
699 static struct loader_dri3_buffer
*
700 dri3_fake_front_buffer(struct loader_dri3_drawable
*draw
)
702 return draw
->buffers
[LOADER_DRI3_FRONT_ID
];
706 dri3_copy_area(xcb_connection_t
*c
,
707 xcb_drawable_t src_drawable
,
708 xcb_drawable_t dst_drawable
,
717 xcb_void_cookie_t cookie
;
719 cookie
= xcb_copy_area_checked(c
,
729 xcb_discard_reply(c
, cookie
.sequence
);
733 * Asks the driver to flush any queued work necessary for serializing with the
734 * X command stream, and optionally the slightly more strict requirement of
735 * glFlush() equivalence (which would require flushing even if nothing had
736 * been drawn to a window system framebuffer, for example).
739 loader_dri3_flush(struct loader_dri3_drawable
*draw
,
741 enum __DRI2throttleReason throttle_reason
)
743 /* NEED TO CHECK WHETHER CONTEXT IS NULL */
744 __DRIcontext
*dri_context
= draw
->vtable
->get_dri_context(draw
);
747 draw
->ext
->flush
->flush_with_flags(dri_context
, draw
->dri_drawable
,
748 flags
, throttle_reason
);
753 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable
*draw
,
755 int width
, int height
,
758 struct loader_dri3_buffer
*back
;
759 unsigned flags
= __DRI2_FLUSH_DRAWABLE
;
761 /* Check we have the right attachments */
762 if (!draw
->have_back
|| draw
->is_pixmap
)
766 flags
|= __DRI2_FLUSH_CONTEXT
;
767 loader_dri3_flush(draw
, flags
, __DRI2_THROTTLE_COPYSUBBUFFER
);
769 back
= dri3_find_back_alloc(draw
);
773 y
= draw
->height
- y
- height
;
775 if (draw
->is_different_gpu
) {
776 /* Update the linear buffer part of the back buffer
777 * for the dri3_copy_area operation
779 (void) loader_dri3_blit_image(draw
,
782 0, 0, back
->width
, back
->height
,
783 0, 0, __BLIT_FLAG_FLUSH
);
786 loader_dri3_swapbuffer_barrier(draw
);
787 dri3_fence_reset(draw
->conn
, back
);
788 dri3_copy_area(draw
->conn
,
791 dri3_drawable_gc(draw
),
792 x
, y
, x
, y
, width
, height
);
793 dri3_fence_trigger(draw
->conn
, back
);
794 /* Refresh the fake front (if present) after we just damaged the real
797 if (draw
->have_fake_front
&&
798 !loader_dri3_blit_image(draw
,
799 dri3_fake_front_buffer(draw
)->image
,
802 x
, y
, __BLIT_FLAG_FLUSH
) &&
803 !draw
->is_different_gpu
) {
804 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
805 dri3_copy_area(draw
->conn
,
807 dri3_fake_front_buffer(draw
)->pixmap
,
808 dri3_drawable_gc(draw
),
809 x
, y
, x
, y
, width
, height
);
810 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
811 dri3_fence_await(draw
->conn
, NULL
, dri3_fake_front_buffer(draw
));
813 dri3_fence_await(draw
->conn
, draw
, back
);
817 loader_dri3_copy_drawable(struct loader_dri3_drawable
*draw
,
821 loader_dri3_flush(draw
, __DRI2_FLUSH_DRAWABLE
, __DRI2_THROTTLE_COPYSUBBUFFER
);
823 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
824 dri3_copy_area(draw
->conn
,
826 dri3_drawable_gc(draw
),
827 0, 0, 0, 0, draw
->width
, draw
->height
);
828 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
829 dri3_fence_await(draw
->conn
, draw
, dri3_fake_front_buffer(draw
));
833 loader_dri3_wait_x(struct loader_dri3_drawable
*draw
)
835 struct loader_dri3_buffer
*front
;
837 if (draw
== NULL
|| !draw
->have_fake_front
)
840 front
= dri3_fake_front_buffer(draw
);
842 loader_dri3_copy_drawable(draw
, front
->pixmap
, draw
->drawable
);
844 /* In the psc->is_different_gpu case, the linear buffer has been updated,
845 * but not yet the tiled buffer.
846 * Copy back to the tiled buffer we use for rendering.
847 * Note that we don't need flushing.
849 if (draw
->is_different_gpu
)
850 (void) loader_dri3_blit_image(draw
,
852 front
->linear_buffer
,
853 0, 0, front
->width
, front
->height
,
858 loader_dri3_wait_gl(struct loader_dri3_drawable
*draw
)
860 struct loader_dri3_buffer
*front
;
862 if (draw
== NULL
|| !draw
->have_fake_front
)
865 front
= dri3_fake_front_buffer(draw
);
867 /* In the psc->is_different_gpu case, we update the linear_buffer
868 * before updating the real front.
870 if (draw
->is_different_gpu
)
871 (void) loader_dri3_blit_image(draw
,
872 front
->linear_buffer
,
874 0, 0, front
->width
, front
->height
,
875 0, 0, __BLIT_FLAG_FLUSH
);
876 loader_dri3_swapbuffer_barrier(draw
);
877 loader_dri3_copy_drawable(draw
, draw
->drawable
, front
->pixmap
);
880 /** dri3_flush_present_events
882 * Process any present events that have been received from the X server
885 dri3_flush_present_events(struct loader_dri3_drawable
*draw
)
887 /* Check to see if any configuration changes have occurred
888 * since we were last invoked
890 if (draw
->has_event_waiter
)
893 if (draw
->special_event
) {
894 xcb_generic_event_t
*ev
;
896 while ((ev
= xcb_poll_for_special_event(draw
->conn
,
897 draw
->special_event
)) != NULL
) {
898 xcb_present_generic_event_t
*ge
= (void *) ev
;
899 dri3_handle_present_event(draw
, ge
);
904 /** loader_dri3_swap_buffers_msc
906 * Make the current back buffer visible using the present extension
909 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable
*draw
,
910 int64_t target_msc
, int64_t divisor
,
911 int64_t remainder
, unsigned flush_flags
,
912 const int *rects
, int n_rects
,
915 struct loader_dri3_buffer
*back
;
917 uint32_t options
= XCB_PRESENT_OPTION_NONE
;
919 draw
->vtable
->flush_drawable(draw
, flush_flags
);
921 back
= dri3_find_back_alloc(draw
);
923 mtx_lock(&draw
->mtx
);
925 if (draw
->adaptive_sync
&& !draw
->adaptive_sync_active
) {
926 set_adaptive_sync_property(draw
->conn
, draw
->drawable
, true);
927 draw
->adaptive_sync_active
= true;
930 if (draw
->is_different_gpu
&& back
) {
931 /* Update the linear buffer before presenting the pixmap */
932 (void) loader_dri3_blit_image(draw
,
935 0, 0, back
->width
, back
->height
,
936 0, 0, __BLIT_FLAG_FLUSH
);
939 /* If we need to preload the new back buffer, remember the source.
940 * The force_copy parameter is used by EGL to attempt to preserve
941 * the back buffer across a call to this function.
943 if (draw
->swap_method
!= __DRI_ATTRIB_SWAP_UNDEFINED
|| force_copy
)
944 draw
->cur_blit_source
= LOADER_DRI3_BACK_ID(draw
->cur_back
);
946 /* Exchange the back and fake front. Even though the server knows about these
947 * buffers, it has no notion of back and fake front.
949 if (back
&& draw
->have_fake_front
) {
950 struct loader_dri3_buffer
*tmp
;
952 tmp
= dri3_fake_front_buffer(draw
);
953 draw
->buffers
[LOADER_DRI3_FRONT_ID
] = back
;
954 draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)] = tmp
;
956 if (draw
->swap_method
== __DRI_ATTRIB_SWAP_COPY
|| force_copy
)
957 draw
->cur_blit_source
= LOADER_DRI3_FRONT_ID
;
960 dri3_flush_present_events(draw
);
962 if (back
&& !draw
->is_pixmap
) {
963 dri3_fence_reset(draw
->conn
, back
);
965 /* Compute when we want the frame shown by taking the last known
966 * successful MSC and adding in a swap interval for each outstanding swap
967 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
971 if (target_msc
== 0 && divisor
== 0 && remainder
== 0)
972 target_msc
= draw
->msc
+ draw
->swap_interval
*
973 (draw
->send_sbc
- draw
->recv_sbc
);
974 else if (divisor
== 0 && remainder
> 0) {
975 /* From the GLX_OML_sync_control spec:
976 * "If <divisor> = 0, the swap will occur when MSC becomes
977 * greater than or equal to <target_msc>."
979 * Note that there's no mention of the remainder. The Present
980 * extension throws BadValue for remainder != 0 with divisor == 0, so
981 * just drop the passed in value.
986 /* From the GLX_EXT_swap_control spec
987 * and the EGL 1.4 spec (page 53):
989 * "If <interval> is set to a value of 0, buffer swaps are not
990 * synchronized to a video frame."
992 * Implementation note: It is possible to enable triple buffering
993 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
996 if (draw
->swap_interval
== 0)
997 options
|= XCB_PRESENT_OPTION_ASYNC
;
999 /* If we need to populate the new back, but need to reuse the back
1000 * buffer slot due to lack of local blit capabilities, make sure
1001 * the server doesn't flip and we deadlock.
1003 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1)
1004 options
|= XCB_PRESENT_OPTION_COPY
;
1005 #ifdef HAVE_DRI3_MODIFIERS
1006 if (draw
->multiplanes_available
)
1007 options
|= XCB_PRESENT_OPTION_SUBOPTIMAL
;
1010 back
->last_swap
= draw
->send_sbc
;
1012 xcb_xfixes_region_t region
= 0;
1013 xcb_rectangle_t xcb_rects
[64];
1015 if (n_rects
> 0 && n_rects
<= ARRAY_SIZE(xcb_rects
)) {
1016 for (int i
= 0; i
< n_rects
; i
++) {
1017 const int *rect
= &rects
[i
* 4];
1018 xcb_rects
[i
].x
= rect
[0];
1019 xcb_rects
[i
].y
= draw
->height
- rect
[1] - rect
[3];
1020 xcb_rects
[i
].width
= rect
[2];
1021 xcb_rects
[i
].height
= rect
[3];
1024 region
= xcb_generate_id(draw
->conn
);
1025 xcb_xfixes_create_region(draw
->conn
, region
, n_rects
, xcb_rects
);
1028 xcb_present_pixmap(draw
->conn
,
1031 (uint32_t) draw
->send_sbc
,
1033 region
, /* update */
1036 None
, /* target_crtc */
1042 remainder
, 0, NULL
);
1043 ret
= (int64_t) draw
->send_sbc
;
1046 xcb_xfixes_destroy_region(draw
->conn
, region
);
1048 /* Schedule a server-side back-preserving blit if necessary.
1049 * This happens iff all conditions below are satisfied:
1050 * a) We have a fake front,
1051 * b) We need to preserve the back buffer,
1052 * c) We don't have local blit capabilities.
1054 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1 &&
1055 draw
->cur_blit_source
!= LOADER_DRI3_BACK_ID(draw
->cur_back
)) {
1056 struct loader_dri3_buffer
*new_back
= dri3_back_buffer(draw
);
1057 struct loader_dri3_buffer
*src
= draw
->buffers
[draw
->cur_blit_source
];
1059 dri3_fence_reset(draw
->conn
, new_back
);
1060 dri3_copy_area(draw
->conn
, src
->pixmap
,
1062 dri3_drawable_gc(draw
),
1063 0, 0, 0, 0, draw
->width
, draw
->height
);
1064 dri3_fence_trigger(draw
->conn
, new_back
);
1065 new_back
->last_swap
= src
->last_swap
;
1068 xcb_flush(draw
->conn
);
1072 mtx_unlock(&draw
->mtx
);
1074 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
1080 loader_dri3_query_buffer_age(struct loader_dri3_drawable
*draw
)
1082 struct loader_dri3_buffer
*back
= dri3_find_back_alloc(draw
);
1085 mtx_lock(&draw
->mtx
);
1086 ret
= (!back
|| back
->last_swap
== 0) ? 0 :
1087 draw
->send_sbc
- back
->last_swap
+ 1;
1088 mtx_unlock(&draw
->mtx
);
1093 /** loader_dri3_open
1095 * Wrapper around xcb_dri3_open
1098 loader_dri3_open(xcb_connection_t
*conn
,
1102 xcb_dri3_open_cookie_t cookie
;
1103 xcb_dri3_open_reply_t
*reply
;
1106 cookie
= xcb_dri3_open(conn
,
1110 reply
= xcb_dri3_open_reply(conn
, cookie
, NULL
);
1114 if (reply
->nfd
!= 1) {
1119 fd
= xcb_dri3_open_reply_fds(conn
, reply
)[0];
1121 fcntl(fd
, F_SETFD
, fcntl(fd
, F_GETFD
) | FD_CLOEXEC
);
1127 dri3_cpp_for_format(uint32_t format
) {
1129 case __DRI_IMAGE_FORMAT_R8
:
1131 case __DRI_IMAGE_FORMAT_RGB565
:
1132 case __DRI_IMAGE_FORMAT_GR88
:
1134 case __DRI_IMAGE_FORMAT_XRGB8888
:
1135 case __DRI_IMAGE_FORMAT_ARGB8888
:
1136 case __DRI_IMAGE_FORMAT_ABGR8888
:
1137 case __DRI_IMAGE_FORMAT_XBGR8888
:
1138 case __DRI_IMAGE_FORMAT_XRGB2101010
:
1139 case __DRI_IMAGE_FORMAT_ARGB2101010
:
1140 case __DRI_IMAGE_FORMAT_XBGR2101010
:
1141 case __DRI_IMAGE_FORMAT_ABGR2101010
:
1142 case __DRI_IMAGE_FORMAT_SARGB8
:
1143 case __DRI_IMAGE_FORMAT_SABGR8
:
1144 case __DRI_IMAGE_FORMAT_SXRGB8
:
1146 case __DRI_IMAGE_FORMAT_XBGR16161616F
:
1147 case __DRI_IMAGE_FORMAT_ABGR16161616F
:
1149 case __DRI_IMAGE_FORMAT_NONE
:
1155 /* Map format of render buffer to corresponding format for the linear_buffer
1156 * used for sharing with the display gpu of a Prime setup (== is_different_gpu).
1157 * Usually linear_format == format, except for depth >= 30 formats, where
1158 * different gpu vendors have different preferences wrt. color channel ordering.
1161 dri3_linear_format_for_format(struct loader_dri3_drawable
*draw
, uint32_t format
)
1164 case __DRI_IMAGE_FORMAT_XRGB2101010
:
1165 case __DRI_IMAGE_FORMAT_XBGR2101010
:
1166 /* Different preferred formats for different hw */
1167 if (dri3_get_red_mask_for_depth(draw
, 30) == 0x3ff)
1168 return __DRI_IMAGE_FORMAT_XBGR2101010
;
1170 return __DRI_IMAGE_FORMAT_XRGB2101010
;
1172 case __DRI_IMAGE_FORMAT_ARGB2101010
:
1173 case __DRI_IMAGE_FORMAT_ABGR2101010
:
1174 /* Different preferred formats for different hw */
1175 if (dri3_get_red_mask_for_depth(draw
, 30) == 0x3ff)
1176 return __DRI_IMAGE_FORMAT_ABGR2101010
;
1178 return __DRI_IMAGE_FORMAT_ARGB2101010
;
1185 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1186 * the createImageFromFds call takes DRM_FORMAT codes. To avoid
1187 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1188 * translate to DRM_FORMAT codes in the call to createImageFromFds
1191 image_format_to_fourcc(int format
)
1194 /* Convert from __DRI_IMAGE_FORMAT to DRM_FORMAT (sigh) */
1196 case __DRI_IMAGE_FORMAT_SARGB8
: return __DRI_IMAGE_FOURCC_SARGB8888
;
1197 case __DRI_IMAGE_FORMAT_SABGR8
: return __DRI_IMAGE_FOURCC_SABGR8888
;
1198 case __DRI_IMAGE_FORMAT_SXRGB8
: return __DRI_IMAGE_FOURCC_SXRGB8888
;
1199 case __DRI_IMAGE_FORMAT_RGB565
: return DRM_FORMAT_RGB565
;
1200 case __DRI_IMAGE_FORMAT_XRGB8888
: return DRM_FORMAT_XRGB8888
;
1201 case __DRI_IMAGE_FORMAT_ARGB8888
: return DRM_FORMAT_ARGB8888
;
1202 case __DRI_IMAGE_FORMAT_ABGR8888
: return DRM_FORMAT_ABGR8888
;
1203 case __DRI_IMAGE_FORMAT_XBGR8888
: return DRM_FORMAT_XBGR8888
;
1204 case __DRI_IMAGE_FORMAT_XRGB2101010
: return DRM_FORMAT_XRGB2101010
;
1205 case __DRI_IMAGE_FORMAT_ARGB2101010
: return DRM_FORMAT_ARGB2101010
;
1206 case __DRI_IMAGE_FORMAT_XBGR2101010
: return DRM_FORMAT_XBGR2101010
;
1207 case __DRI_IMAGE_FORMAT_ABGR2101010
: return DRM_FORMAT_ABGR2101010
;
1208 case __DRI_IMAGE_FORMAT_XBGR16161616F
: return DRM_FORMAT_XBGR16161616F
;
1209 case __DRI_IMAGE_FORMAT_ABGR16161616F
: return DRM_FORMAT_ABGR16161616F
;
#ifdef HAVE_DRI3_MODIFIERS
/* Return true iff any of \p modifiers is supported by the driver for
 * \p format, as reported by queryDmaBufModifiers.
 */
static bool
has_supported_modifier(struct loader_dri3_drawable *draw, unsigned int format,
                       uint64_t *modifiers, uint32_t count)
{
   uint64_t *supported_modifiers;
   int32_t supported_modifiers_count;
   bool found = false;
   int i, j;

   /* First query just the count of supported modifiers. */
   if (!draw->ext->image->queryDmaBufModifiers(draw->dri_screen,
                                               format, 0, NULL, NULL,
                                               &supported_modifiers_count) ||
       supported_modifiers_count == 0)
      return false;

   supported_modifiers = malloc(supported_modifiers_count * sizeof(uint64_t));
   if (!supported_modifiers)
      return false;

   draw->ext->image->queryDmaBufModifiers(draw->dri_screen, format,
                                          supported_modifiers_count,
                                          supported_modifiers, NULL,
                                          &supported_modifiers_count);

   for (i = 0; !found && i < supported_modifiers_count; i++) {
      for (j = 0; !found && j < count; j++) {
         if (supported_modifiers[i] == modifiers[j])
            found = true;
      }
   }

   free(supported_modifiers);
   return found;
}
#endif
1251 /** loader_dri3_alloc_render_buffer
1253 * Use the driver createImage function to construct a __DRIimage, then
1254 * get a file descriptor for that and create an X pixmap from that
1256 * Allocate an xshmfence for synchronization
1258 static struct loader_dri3_buffer
*
1259 dri3_alloc_render_buffer(struct loader_dri3_drawable
*draw
, unsigned int format
,
1260 int width
, int height
, int depth
)
1262 struct loader_dri3_buffer
*buffer
;
1263 __DRIimage
*pixmap_buffer
;
1264 xcb_pixmap_t pixmap
;
1265 xcb_sync_fence_t sync_fence
;
1266 struct xshmfence
*shm_fence
;
1267 int buffer_fds
[4], fence_fd
;
1272 /* Create an xshmfence object and
1273 * prepare to send that to the X server
1276 fence_fd
= xshmfence_alloc_shm();
1280 shm_fence
= xshmfence_map_shm(fence_fd
);
1281 if (shm_fence
== NULL
)
1284 /* Allocate the image from the driver
1286 buffer
= calloc(1, sizeof *buffer
);
1290 buffer
->cpp
= dri3_cpp_for_format(format
);
1294 if (!draw
->is_different_gpu
) {
1295 #ifdef HAVE_DRI3_MODIFIERS
1296 if (draw
->multiplanes_available
&&
1297 draw
->ext
->image
->base
.version
>= 15 &&
1298 draw
->ext
->image
->queryDmaBufModifiers
&&
1299 draw
->ext
->image
->createImageWithModifiers
) {
1300 xcb_dri3_get_supported_modifiers_cookie_t mod_cookie
;
1301 xcb_dri3_get_supported_modifiers_reply_t
*mod_reply
;
1302 xcb_generic_error_t
*error
= NULL
;
1303 uint64_t *modifiers
= NULL
;
1306 mod_cookie
= xcb_dri3_get_supported_modifiers(draw
->conn
,
1308 depth
, buffer
->cpp
* 8);
1309 mod_reply
= xcb_dri3_get_supported_modifiers_reply(draw
->conn
,
1315 if (mod_reply
->num_window_modifiers
) {
1316 count
= mod_reply
->num_window_modifiers
;
1317 modifiers
= malloc(count
* sizeof(uint64_t));
1324 xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply
),
1325 count
* sizeof(uint64_t));
1327 if (!has_supported_modifier(draw
, image_format_to_fourcc(format
),
1328 modifiers
, count
)) {
1335 if (mod_reply
->num_screen_modifiers
&& modifiers
== NULL
) {
1336 count
= mod_reply
->num_screen_modifiers
;
1337 modifiers
= malloc(count
* sizeof(uint64_t));
1345 xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply
),
1346 count
* sizeof(uint64_t));
1351 /* don't use createImageWithModifiers() if we have no
1352 * modifiers, other things depend on the use flags when
1353 * there are no modifiers to know that a buffer can be
1357 buffer
->image
= draw
->ext
->image
->createImageWithModifiers(draw
->dri_screen
,
1369 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1372 __DRI_IMAGE_USE_SHARE
|
1373 __DRI_IMAGE_USE_SCANOUT
|
1374 __DRI_IMAGE_USE_BACKBUFFER
,
1377 pixmap_buffer
= buffer
->image
;
1382 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1391 buffer
->linear_buffer
=
1392 draw
->ext
->image
->createImage(draw
->dri_screen
,
1394 dri3_linear_format_for_format(draw
, format
),
1395 __DRI_IMAGE_USE_SHARE
|
1396 __DRI_IMAGE_USE_LINEAR
|
1397 __DRI_IMAGE_USE_BACKBUFFER
,
1399 pixmap_buffer
= buffer
->linear_buffer
;
1401 if (!buffer
->linear_buffer
)
1402 goto no_linear_buffer
;
1405 /* X want some information about the planes, so ask the image for it
1407 if (!draw
->ext
->image
->queryImage(pixmap_buffer
, __DRI_IMAGE_ATTRIB_NUM_PLANES
,
1411 for (i
= 0; i
< num_planes
; i
++) {
1412 __DRIimage
*image
= draw
->ext
->image
->fromPlanar(pixmap_buffer
, i
, NULL
);
1416 image
= pixmap_buffer
;
1421 ret
= draw
->ext
->image
->queryImage(image
, __DRI_IMAGE_ATTRIB_FD
,
1423 ret
&= draw
->ext
->image
->queryImage(image
, __DRI_IMAGE_ATTRIB_STRIDE
,
1424 &buffer
->strides
[i
]);
1425 ret
&= draw
->ext
->image
->queryImage(image
, __DRI_IMAGE_ATTRIB_OFFSET
,
1426 &buffer
->offsets
[i
]);
1427 if (image
!= pixmap_buffer
)
1428 draw
->ext
->image
->destroyImage(image
);
1431 goto no_buffer_attrib
;
1434 ret
= draw
->ext
->image
->queryImage(pixmap_buffer
,
1435 __DRI_IMAGE_ATTRIB_MODIFIER_UPPER
, &mod
);
1436 buffer
->modifier
= (uint64_t) mod
<< 32;
1437 ret
&= draw
->ext
->image
->queryImage(pixmap_buffer
,
1438 __DRI_IMAGE_ATTRIB_MODIFIER_LOWER
, &mod
);
1439 buffer
->modifier
|= (uint64_t)(mod
& 0xffffffff);
1442 buffer
->modifier
= DRM_FORMAT_MOD_INVALID
;
1444 pixmap
= xcb_generate_id(draw
->conn
);
1445 #ifdef HAVE_DRI3_MODIFIERS
1446 if (draw
->multiplanes_available
&&
1447 buffer
->modifier
!= DRM_FORMAT_MOD_INVALID
) {
1448 xcb_dri3_pixmap_from_buffers(draw
->conn
,
1453 buffer
->strides
[0], buffer
->offsets
[0],
1454 buffer
->strides
[1], buffer
->offsets
[1],
1455 buffer
->strides
[2], buffer
->offsets
[2],
1456 buffer
->strides
[3], buffer
->offsets
[3],
1457 depth
, buffer
->cpp
* 8,
1463 xcb_dri3_pixmap_from_buffer(draw
->conn
,
1467 width
, height
, buffer
->strides
[0],
1468 depth
, buffer
->cpp
* 8,
1472 xcb_dri3_fence_from_fd(draw
->conn
,
1474 (sync_fence
= xcb_generate_id(draw
->conn
)),
1478 buffer
->pixmap
= pixmap
;
1479 buffer
->own_pixmap
= true;
1480 buffer
->sync_fence
= sync_fence
;
1481 buffer
->shm_fence
= shm_fence
;
1482 buffer
->width
= width
;
1483 buffer
->height
= height
;
1485 /* Mark the buffer as idle
1487 dri3_fence_set(buffer
);
1493 if (buffer_fds
[i
] != -1)
1494 close(buffer_fds
[i
]);
1496 draw
->ext
->image
->destroyImage(pixmap_buffer
);
1498 if (draw
->is_different_gpu
)
1499 draw
->ext
->image
->destroyImage(buffer
->image
);
1503 xshmfence_unmap_shm(shm_fence
);
1509 /** loader_dri3_update_drawable
1511 * Called the first time we use the drawable and then
1512 * after we receive present configure notify events to
1513 * track the geometry of the drawable
1516 dri3_update_drawable(struct loader_dri3_drawable
*draw
)
1518 mtx_lock(&draw
->mtx
);
1519 if (draw
->first_init
) {
1520 xcb_get_geometry_cookie_t geom_cookie
;
1521 xcb_get_geometry_reply_t
*geom_reply
;
1522 xcb_void_cookie_t cookie
;
1523 xcb_generic_error_t
*error
;
1524 xcb_present_query_capabilities_cookie_t present_capabilities_cookie
;
1525 xcb_present_query_capabilities_reply_t
*present_capabilities_reply
;
1526 xcb_window_t root_win
;
1528 draw
->first_init
= false;
1530 /* Try to select for input on the window.
1532 * If the drawable is a window, this will get our events
1535 * Otherwise, we'll get a BadWindow error back from this request which
1536 * will let us know that the drawable is a pixmap instead.
1539 draw
->eid
= xcb_generate_id(draw
->conn
);
1541 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
1542 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY
|
1543 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY
|
1544 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY
);
1546 present_capabilities_cookie
=
1547 xcb_present_query_capabilities(draw
->conn
, draw
->drawable
);
1549 /* Create an XCB event queue to hold present events outside of the usual
1550 * application event queue
1552 draw
->special_event
= xcb_register_for_special_xge(draw
->conn
,
1556 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1558 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1561 mtx_unlock(&draw
->mtx
);
1564 draw
->width
= geom_reply
->width
;
1565 draw
->height
= geom_reply
->height
;
1566 draw
->depth
= geom_reply
->depth
;
1567 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
1568 root_win
= geom_reply
->root
;
1572 draw
->is_pixmap
= false;
1574 /* Check to see if our select input call failed. If it failed with a
1575 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1576 * special event queue created above and mark the drawable as a pixmap
1579 error
= xcb_request_check(draw
->conn
, cookie
);
1581 present_capabilities_reply
=
1582 xcb_present_query_capabilities_reply(draw
->conn
,
1583 present_capabilities_cookie
,
1586 if (present_capabilities_reply
) {
1587 draw
->present_capabilities
= present_capabilities_reply
->capabilities
;
1588 free(present_capabilities_reply
);
1590 draw
->present_capabilities
= 0;
1593 if (error
->error_code
!= BadWindow
) {
1595 mtx_unlock(&draw
->mtx
);
1599 draw
->is_pixmap
= true;
1600 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
1601 draw
->special_event
= NULL
;
1604 if (draw
->is_pixmap
)
1605 draw
->window
= root_win
;
1607 draw
->window
= draw
->drawable
;
1609 dri3_flush_present_events(draw
);
1610 mtx_unlock(&draw
->mtx
);
1615 loader_dri3_create_image(xcb_connection_t
*c
,
1616 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
,
1617 unsigned int format
,
1618 __DRIscreen
*dri_screen
,
1619 const __DRIimageExtension
*image
,
1620 void *loaderPrivate
)
1623 __DRIimage
*image_planar
, *ret
;
1626 /* Get an FD for the pixmap object
1628 fds
= xcb_dri3_buffer_from_pixmap_reply_fds(c
, bp_reply
);
1630 stride
= bp_reply
->stride
;
1633 /* createImageFromFds creates a wrapper __DRIimage structure which
1634 * can deal with multiple planes for things like Yuv images. So, once
1635 * we've gotten the planar wrapper, pull the single plane out of it and
1636 * discard the wrapper.
1638 image_planar
= image
->createImageFromFds(dri_screen
,
1641 image_format_to_fourcc(format
),
1643 &stride
, &offset
, loaderPrivate
);
1648 ret
= image
->fromPlanar(image_planar
, 0, loaderPrivate
);
1653 image
->destroyImage(image_planar
);
1658 #ifdef HAVE_DRI3_MODIFIERS
1660 loader_dri3_create_image_from_buffers(xcb_connection_t
*c
,
1661 xcb_dri3_buffers_from_pixmap_reply_t
*bp_reply
,
1662 unsigned int format
,
1663 __DRIscreen
*dri_screen
,
1664 const __DRIimageExtension
*image
,
1665 void *loaderPrivate
)
1669 uint32_t *strides_in
, *offsets_in
;
1670 int strides
[4], offsets
[4];
1674 if (bp_reply
->nfd
> 4)
1677 fds
= xcb_dri3_buffers_from_pixmap_reply_fds(c
, bp_reply
);
1678 strides_in
= xcb_dri3_buffers_from_pixmap_strides(bp_reply
);
1679 offsets_in
= xcb_dri3_buffers_from_pixmap_offsets(bp_reply
);
1680 for (i
= 0; i
< bp_reply
->nfd
; i
++) {
1681 strides
[i
] = strides_in
[i
];
1682 offsets
[i
] = offsets_in
[i
];
1685 ret
= image
->createImageFromDmaBufs2(dri_screen
,
1688 image_format_to_fourcc(format
),
1692 0, 0, 0, 0, /* UNDEFINED */
1693 &error
, loaderPrivate
);
1695 for (i
= 0; i
< bp_reply
->nfd
; i
++)
1702 /** dri3_get_pixmap_buffer
1704 * Get the DRM object for a pixmap from the X server and
1705 * wrap that with a __DRIimage structure using createImageFromFds
1707 static struct loader_dri3_buffer
*
1708 dri3_get_pixmap_buffer(__DRIdrawable
*driDrawable
, unsigned int format
,
1709 enum loader_dri3_buffer_type buffer_type
,
1710 struct loader_dri3_drawable
*draw
)
1712 int buf_id
= loader_dri3_pixmap_buf_id(buffer_type
);
1713 struct loader_dri3_buffer
*buffer
= draw
->buffers
[buf_id
];
1714 xcb_drawable_t pixmap
;
1715 xcb_sync_fence_t sync_fence
;
1716 struct xshmfence
*shm_fence
;
1720 __DRIscreen
*cur_screen
;
1725 pixmap
= draw
->drawable
;
1727 buffer
= calloc(1, sizeof *buffer
);
1731 fence_fd
= xshmfence_alloc_shm();
1734 shm_fence
= xshmfence_map_shm(fence_fd
);
1735 if (shm_fence
== NULL
) {
1740 /* Get the currently-bound screen or revert to using the drawable's screen if
1741 * no contexts are currently bound. The latter case is at least necessary for
1742 * obs-studio, when using Window Capture (Xcomposite) as a Source.
1744 cur_screen
= draw
->vtable
->get_dri_screen();
1746 cur_screen
= draw
->dri_screen
;
1749 xcb_dri3_fence_from_fd(draw
->conn
,
1751 (sync_fence
= xcb_generate_id(draw
->conn
)),
1754 #ifdef HAVE_DRI3_MODIFIERS
1755 if (draw
->multiplanes_available
&&
1756 draw
->ext
->image
->base
.version
>= 15 &&
1757 draw
->ext
->image
->createImageFromDmaBufs2
) {
1758 xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie
;
1759 xcb_dri3_buffers_from_pixmap_reply_t
*bps_reply
;
1761 bps_cookie
= xcb_dri3_buffers_from_pixmap(draw
->conn
, pixmap
);
1762 bps_reply
= xcb_dri3_buffers_from_pixmap_reply(draw
->conn
, bps_cookie
,
1767 loader_dri3_create_image_from_buffers(draw
->conn
, bps_reply
, format
,
1768 cur_screen
, draw
->ext
->image
,
1770 width
= bps_reply
->width
;
1771 height
= bps_reply
->height
;
1776 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie
;
1777 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
;
1779 bp_cookie
= xcb_dri3_buffer_from_pixmap(draw
->conn
, pixmap
);
1780 bp_reply
= xcb_dri3_buffer_from_pixmap_reply(draw
->conn
, bp_cookie
, NULL
);
1784 buffer
->image
= loader_dri3_create_image(draw
->conn
, bp_reply
, format
,
1785 cur_screen
, draw
->ext
->image
,
1787 width
= bp_reply
->width
;
1788 height
= bp_reply
->height
;
1795 buffer
->pixmap
= pixmap
;
1796 buffer
->own_pixmap
= false;
1797 buffer
->width
= width
;
1798 buffer
->height
= height
;
1799 buffer
->shm_fence
= shm_fence
;
1800 buffer
->sync_fence
= sync_fence
;
1802 draw
->buffers
[buf_id
] = buffer
;
1807 xcb_sync_destroy_fence(draw
->conn
, sync_fence
);
1808 xshmfence_unmap_shm(shm_fence
);
1817 * Find a front or back buffer, allocating new ones as necessary
1819 static struct loader_dri3_buffer
*
1820 dri3_get_buffer(__DRIdrawable
*driDrawable
,
1821 unsigned int format
,
1822 enum loader_dri3_buffer_type buffer_type
,
1823 struct loader_dri3_drawable
*draw
)
1825 struct loader_dri3_buffer
*buffer
;
1826 bool fence_await
= buffer_type
== loader_dri3_buffer_back
;
1829 if (buffer_type
== loader_dri3_buffer_back
) {
1830 draw
->back_format
= format
;
1832 buf_id
= dri3_find_back(draw
);
1837 buf_id
= LOADER_DRI3_FRONT_ID
;
1840 buffer
= draw
->buffers
[buf_id
];
1842 /* Allocate a new buffer if there isn't an old one, if that
1843 * old one is the wrong size, or if it's suboptimal
1845 if (!buffer
|| buffer
->width
!= draw
->width
||
1846 buffer
->height
!= draw
->height
||
1847 buffer
->reallocate
) {
1848 struct loader_dri3_buffer
*new_buffer
;
1850 /* Allocate the new buffers
1852 new_buffer
= dri3_alloc_render_buffer(draw
,
1860 /* When resizing, copy the contents of the old buffer, waiting for that
1861 * copy to complete using our fences before proceeding
1863 if ((buffer_type
== loader_dri3_buffer_back
||
1864 (buffer_type
== loader_dri3_buffer_front
&& draw
->have_fake_front
))
1867 /* Fill the new buffer with data from an old buffer */
1868 if (!loader_dri3_blit_image(draw
,
1872 MIN2(buffer
->width
, new_buffer
->width
),
1873 MIN2(buffer
->height
, new_buffer
->height
),
1875 !buffer
->linear_buffer
) {
1876 dri3_fence_reset(draw
->conn
, new_buffer
);
1877 dri3_copy_area(draw
->conn
,
1880 dri3_drawable_gc(draw
),
1882 draw
->width
, draw
->height
);
1883 dri3_fence_trigger(draw
->conn
, new_buffer
);
1886 dri3_free_render_buffer(draw
, buffer
);
1887 } else if (buffer_type
== loader_dri3_buffer_front
) {
1888 /* Fill the new fake front with data from a real front */
1889 loader_dri3_swapbuffer_barrier(draw
);
1890 dri3_fence_reset(draw
->conn
, new_buffer
);
1891 dri3_copy_area(draw
->conn
,
1894 dri3_drawable_gc(draw
),
1896 draw
->width
, draw
->height
);
1897 dri3_fence_trigger(draw
->conn
, new_buffer
);
1899 if (new_buffer
->linear_buffer
) {
1900 dri3_fence_await(draw
->conn
, draw
, new_buffer
);
1901 (void) loader_dri3_blit_image(draw
,
1903 new_buffer
->linear_buffer
,
1904 0, 0, draw
->width
, draw
->height
,
1909 buffer
= new_buffer
;
1910 draw
->buffers
[buf_id
] = buffer
;
1914 dri3_fence_await(draw
->conn
, draw
, buffer
);
1917 * Do we need to preserve the content of a previous buffer?
1919 * Note that this blit is needed only to avoid a wait for a buffer that
1920 * is currently in the flip chain or being scanned out from. That's really
1921 * a tradeoff. If we're ok with the wait we can reduce the number of back
1922 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1923 * but in the latter case we must disallow page-flipping.
1925 if (buffer_type
== loader_dri3_buffer_back
&&
1926 draw
->cur_blit_source
!= -1 &&
1927 draw
->buffers
[draw
->cur_blit_source
] &&
1928 buffer
!= draw
->buffers
[draw
->cur_blit_source
]) {
1930 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
1932 /* Avoid flushing here. Will propably do good for tiling hardware. */
1933 (void) loader_dri3_blit_image(draw
,
1936 0, 0, draw
->width
, draw
->height
,
1938 buffer
->last_swap
= source
->last_swap
;
1939 draw
->cur_blit_source
= -1;
1941 /* Return the requested buffer */
1945 /** dri3_free_buffers
1947 * Free the front bufffer or all of the back buffers. Used
1948 * when the application changes which buffers it needs
1951 dri3_free_buffers(__DRIdrawable
*driDrawable
,
1952 enum loader_dri3_buffer_type buffer_type
,
1953 struct loader_dri3_drawable
*draw
)
1955 struct loader_dri3_buffer
*buffer
;
1960 switch (buffer_type
) {
1961 case loader_dri3_buffer_back
:
1962 first_id
= LOADER_DRI3_BACK_ID(0);
1963 n_id
= LOADER_DRI3_MAX_BACK
;
1964 draw
->cur_blit_source
= -1;
1966 case loader_dri3_buffer_front
:
1967 first_id
= LOADER_DRI3_FRONT_ID
;
1968 /* Don't free a fake front holding new backbuffer content. */
1969 n_id
= (draw
->cur_blit_source
== LOADER_DRI3_FRONT_ID
) ? 0 : 1;
1972 for (buf_id
= first_id
; buf_id
< first_id
+ n_id
; buf_id
++) {
1973 buffer
= draw
->buffers
[buf_id
];
1975 dri3_free_render_buffer(draw
, buffer
);
1976 draw
->buffers
[buf_id
] = NULL
;
1981 /** loader_dri3_get_buffers
1983 * The published buffer allocation API.
1984 * Returns all of the necessary buffers, allocating
1988 loader_dri3_get_buffers(__DRIdrawable
*driDrawable
,
1989 unsigned int format
,
1991 void *loaderPrivate
,
1992 uint32_t buffer_mask
,
1993 struct __DRIimageList
*buffers
)
1995 struct loader_dri3_drawable
*draw
= loaderPrivate
;
1996 struct loader_dri3_buffer
*front
, *back
;
1999 buffers
->image_mask
= 0;
2000 buffers
->front
= NULL
;
2001 buffers
->back
= NULL
;
2006 if (!dri3_update_drawable(draw
))
2009 dri3_update_num_back(draw
);
2011 /* Free no longer needed back buffers */
2012 for (buf_id
= draw
->num_back
; buf_id
< LOADER_DRI3_MAX_BACK
; buf_id
++) {
2013 if (draw
->cur_blit_source
!= buf_id
&& draw
->buffers
[buf_id
]) {
2014 dri3_free_render_buffer(draw
, draw
->buffers
[buf_id
]);
2015 draw
->buffers
[buf_id
] = NULL
;
2019 /* pixmaps always have front buffers.
2020 * Exchange swaps also mandate fake front buffers.
2022 if (draw
->is_pixmap
|| draw
->swap_method
== __DRI_ATTRIB_SWAP_EXCHANGE
)
2023 buffer_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
2025 if (buffer_mask
& __DRI_IMAGE_BUFFER_FRONT
) {
2026 /* All pixmaps are owned by the server gpu.
2027 * When we use a different gpu, we can't use the pixmap
2028 * as buffer since it is potentially tiled a way
2029 * our device can't understand. In this case, use
2030 * a fake front buffer. Hopefully the pixmap
2031 * content will get synced with the fake front
2034 if (draw
->is_pixmap
&& !draw
->is_different_gpu
)
2035 front
= dri3_get_pixmap_buffer(driDrawable
,
2037 loader_dri3_buffer_front
,
2040 front
= dri3_get_buffer(driDrawable
,
2042 loader_dri3_buffer_front
,
2048 dri3_free_buffers(driDrawable
, loader_dri3_buffer_front
, draw
);
2049 draw
->have_fake_front
= 0;
2052 if (buffer_mask
& __DRI_IMAGE_BUFFER_BACK
) {
2053 back
= dri3_get_buffer(driDrawable
,
2055 loader_dri3_buffer_back
,
2059 draw
->have_back
= 1;
2061 dri3_free_buffers(driDrawable
, loader_dri3_buffer_back
, draw
);
2062 draw
->have_back
= 0;
2066 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
2067 buffers
->front
= front
->image
;
2068 draw
->have_fake_front
= draw
->is_different_gpu
|| !draw
->is_pixmap
;
2072 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_BACK
;
2073 buffers
->back
= back
->image
;
2076 draw
->stamp
= stamp
;
2081 /** loader_dri3_update_drawable_geometry
2083 * Get the current drawable geometry.
2086 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable
*draw
)
2088 xcb_get_geometry_cookie_t geom_cookie
;
2089 xcb_get_geometry_reply_t
*geom_reply
;
2091 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
2093 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
2096 draw
->width
= geom_reply
->width
;
2097 draw
->height
= geom_reply
->height
;
2098 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
2099 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
2107 * Make sure the server has flushed all pending swap buffers to hardware
2108 * for this drawable. Ideally we'd want to send an X protocol request to
2109 * have the server block our connection until the swaps are complete. That
2110 * would avoid the potential round-trip here.
2113 loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable
*draw
)
2115 int64_t ust
, msc
, sbc
;
2117 (void) loader_dri3_wait_for_sbc(draw
, 0, &ust
, &msc
, &sbc
);
2121 * Perform any cleanup associated with a close screen operation.
2122 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
2124 * This function destroys the screen's cached swap context if any.
2127 loader_dri3_close_screen(__DRIscreen
*dri_screen
)
2129 mtx_lock(&blit_context
.mtx
);
2130 if (blit_context
.ctx
&& blit_context
.cur_screen
== dri_screen
) {
2131 blit_context
.core
->destroyContext(blit_context
.ctx
);
2132 blit_context
.ctx
= NULL
;
2134 mtx_unlock(&blit_context
.mtx
);
2138 * Find a backbuffer slot - potentially allocating a back buffer
2140 * \param draw[in,out] Pointer to the drawable for which to find back.
2141 * \return Pointer to a new back buffer or NULL if allocation failed or was
2144 * Find a potentially new back buffer, and if it's not been allocated yet and
2145 * in addition needs initializing, then try to allocate and initialize it.
2148 static struct loader_dri3_buffer
*
2149 dri3_find_back_alloc(struct loader_dri3_drawable
*draw
)
2151 struct loader_dri3_buffer
*back
;
2154 id
= dri3_find_back(draw
);
2158 back
= draw
->buffers
[id
];
2159 /* Allocate a new back if we haven't got one */
2160 if (!back
&& draw
->back_format
!= __DRI_IMAGE_FORMAT_NONE
&&
2161 dri3_update_drawable(draw
))
2162 back
= dri3_alloc_render_buffer(draw
, draw
->back_format
,
2163 draw
->width
, draw
->height
, draw
->depth
);
2168 draw
->buffers
[id
] = back
;
2170 /* If necessary, prefill the back with data according to swap_method mode. */
2171 if (draw
->cur_blit_source
!= -1 &&
2172 draw
->buffers
[draw
->cur_blit_source
] &&
2173 back
!= draw
->buffers
[draw
->cur_blit_source
]) {
2174 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
2176 dri3_fence_await(draw
->conn
, draw
, source
);
2177 dri3_fence_await(draw
->conn
, draw
, back
);
2178 (void) loader_dri3_blit_image(draw
,
2181 0, 0, draw
->width
, draw
->height
,
2183 back
->last_swap
= source
->last_swap
;
2184 draw
->cur_blit_source
= -1;