2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
28 #include <X11/xshmfence.h>
31 #include <xcb/present.h>
33 #include <X11/Xlib-xcb.h>
35 #include <c11/threads.h>
36 #include "loader_dri3_helper.h"
38 /* From xmlpool/options.h, user exposed so should be stable */
39 #define DRI_CONF_VBLANK_NEVER 0
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
41 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
42 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
45 * A cached blit context.
47 struct loader_dri3_blit_context
{
50 __DRIscreen
*cur_screen
;
51 const __DRIcoreExtension
*core
;
54 /* For simplicity we maintain the cache only for a single screen at a time */
55 static struct loader_dri3_blit_context blit_context
= {
56 _MTX_INITIALIZER_NP
, NULL
60 dri3_flush_present_events(struct loader_dri3_drawable
*draw
);
63 * Do we have blit functionality in the image blit extension?
65 * \param draw[in] The drawable intended to blit from / to.
66 * \return true if we have blit functionality. false otherwise.
68 static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable
*draw
)
70 return draw
->ext
->image
->base
.version
>= 9 &&
71 draw
->ext
->image
->blitImage
!= NULL
;
75 * Get and lock (for use with the current thread) a dri context associated
76 * with the drawable's dri screen. The context is intended to be used with
77 * the dri image extension's blitImage method.
79 * \param draw[in] Pointer to the drawable whose dri screen we want a
81 * \return A dri context or NULL if context creation failed.
83 * When the caller is done with the context (even if the context returned was
84 * NULL), the caller must call loader_dri3_blit_context_put.
87 loader_dri3_blit_context_get(struct loader_dri3_drawable
*draw
)
89 mtx_lock(&blit_context
.mtx
);
91 if (blit_context
.ctx
&& blit_context
.cur_screen
!= draw
->dri_screen
) {
92 blit_context
.core
->destroyContext(blit_context
.ctx
);
93 blit_context
.ctx
= NULL
;
96 if (!blit_context
.ctx
) {
97 blit_context
.ctx
= draw
->ext
->core
->createNewContext(draw
->dri_screen
,
99 blit_context
.cur_screen
= draw
->dri_screen
;
100 blit_context
.core
= draw
->ext
->core
;
103 return blit_context
.ctx
;
107 * Release (for use with other threads) a dri context previously obtained using
108 * loader_dri3_blit_context_get.
111 loader_dri3_blit_context_put(void)
113 mtx_unlock(&blit_context
.mtx
);
117 * Blit (parts of) the contents of a DRI image to another dri image
119 * \param draw[in] The drawable which owns the images.
120 * \param dst[in] The destination image.
121 * \param src[in] The source image.
122 * \param dstx0[in] Start destination coordinate.
123 * \param dsty0[in] Start destination coordinate.
124 * \param width[in] Blit width.
125 * \param height[in] Blit height.
126 * \param srcx0[in] Start source coordinate.
127 * \param srcy0[in] Start source coordinate.
128 * \param flush_flag[in] Image blit flush flag.
129 * \return true iff successful.
132 loader_dri3_blit_image(struct loader_dri3_drawable
*draw
,
133 __DRIimage
*dst
, __DRIimage
*src
,
134 int dstx0
, int dsty0
, int width
, int height
,
135 int srcx0
, int srcy0
, int flush_flag
)
137 __DRIcontext
*dri_context
;
138 bool use_blit_context
= false;
140 if (!loader_dri3_have_image_blit(draw
))
143 dri_context
= draw
->vtable
->get_dri_context(draw
);
145 if (!dri_context
|| !draw
->vtable
->in_current_context(draw
)) {
146 dri_context
= loader_dri3_blit_context_get(draw
);
147 use_blit_context
= true;
148 flush_flag
|= __BLIT_FLAG_FLUSH
;
152 draw
->ext
->image
->blitImage(dri_context
, dst
, src
, dstx0
, dsty0
,
153 width
, height
, srcx0
, srcy0
,
154 width
, height
, flush_flag
);
156 if (use_blit_context
)
157 loader_dri3_blit_context_put();
159 return dri_context
!= NULL
;
163 dri3_fence_reset(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
165 xshmfence_reset(buffer
->shm_fence
);
169 dri3_fence_set(struct loader_dri3_buffer
*buffer
)
171 xshmfence_trigger(buffer
->shm_fence
);
175 dri3_fence_trigger(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
177 xcb_sync_trigger_fence(c
, buffer
->sync_fence
);
181 dri3_fence_await(xcb_connection_t
*c
, struct loader_dri3_buffer
*buffer
)
184 xshmfence_await(buffer
->shm_fence
);
/* Recompute the number of back buffers for this drawable.
 * NOTE(review): only the signature of this function survives in this
 * extraction — the body (which presumably derives draw->num_back from the
 * flipping / swap-interval state) is missing and must be restored from
 * upstream before this file can build.  TODO confirm against upstream.
 */
dri3_update_num_back(struct loader_dri3_drawable *draw)
197 loader_dri3_set_swap_interval(struct loader_dri3_drawable
*draw
, int interval
)
199 draw
->swap_interval
= interval
;
200 dri3_update_num_back(draw
);
203 /** dri3_free_render_buffer
205 * Free everything associated with one render buffer including pixmap, fence
206 * stuff and the driver image
209 dri3_free_render_buffer(struct loader_dri3_drawable
*draw
,
210 struct loader_dri3_buffer
*buffer
)
212 if (buffer
->own_pixmap
)
213 xcb_free_pixmap(draw
->conn
, buffer
->pixmap
);
214 xcb_sync_destroy_fence(draw
->conn
, buffer
->sync_fence
);
215 xshmfence_unmap_shm(buffer
->shm_fence
);
216 draw
->ext
->image
->destroyImage(buffer
->image
);
217 if (buffer
->linear_buffer
)
218 draw
->ext
->image
->destroyImage(buffer
->linear_buffer
);
223 loader_dri3_drawable_fini(struct loader_dri3_drawable
*draw
)
227 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
229 for (i
= 0; i
< LOADER_DRI3_NUM_BUFFERS
; i
++) {
230 if (draw
->buffers
[i
])
231 dri3_free_render_buffer(draw
, draw
->buffers
[i
]);
234 if (draw
->special_event
) {
235 xcb_void_cookie_t cookie
=
236 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
237 XCB_PRESENT_EVENT_MASK_NO_EVENT
);
239 xcb_discard_reply(draw
->conn
, cookie
.sequence
);
240 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
245 loader_dri3_drawable_init(xcb_connection_t
*conn
,
246 xcb_drawable_t drawable
,
247 __DRIscreen
*dri_screen
,
248 bool is_different_gpu
,
249 const __DRIconfig
*dri_config
,
250 struct loader_dri3_extensions
*ext
,
251 const struct loader_dri3_vtable
*vtable
,
252 struct loader_dri3_drawable
*draw
)
254 xcb_get_geometry_cookie_t cookie
;
255 xcb_get_geometry_reply_t
*reply
;
256 xcb_generic_error_t
*error
;
257 GLint vblank_mode
= DRI_CONF_VBLANK_DEF_INTERVAL_1
;
262 draw
->vtable
= vtable
;
263 draw
->drawable
= drawable
;
264 draw
->dri_screen
= dri_screen
;
265 draw
->is_different_gpu
= is_different_gpu
;
268 draw
->have_fake_front
= 0;
269 draw
->first_init
= true;
271 draw
->cur_blit_source
= -1;
273 if (draw
->ext
->config
)
274 draw
->ext
->config
->configQueryi(draw
->dri_screen
,
275 "vblank_mode", &vblank_mode
);
277 switch (vblank_mode
) {
278 case DRI_CONF_VBLANK_NEVER
:
279 case DRI_CONF_VBLANK_DEF_INTERVAL_0
:
282 case DRI_CONF_VBLANK_DEF_INTERVAL_1
:
283 case DRI_CONF_VBLANK_ALWAYS_SYNC
:
288 draw
->swap_interval
= swap_interval
;
290 dri3_update_num_back(draw
);
292 /* Create a new drawable */
294 draw
->ext
->image_driver
->createNewDrawable(dri_screen
,
298 if (!draw
->dri_drawable
)
301 cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
302 reply
= xcb_get_geometry_reply(draw
->conn
, cookie
, &error
);
303 if (reply
== NULL
|| error
!= NULL
) {
304 draw
->ext
->core
->destroyDrawable(draw
->dri_drawable
);
308 draw
->width
= reply
->width
;
309 draw
->height
= reply
->height
;
310 draw
->depth
= reply
->depth
;
311 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
315 * Make sure server has the same swap interval we do for the new
318 loader_dri3_set_swap_interval(draw
, swap_interval
);
324 * Process one Present event
327 dri3_handle_present_event(struct loader_dri3_drawable
*draw
,
328 xcb_present_generic_event_t
*ge
)
330 switch (ge
->evtype
) {
331 case XCB_PRESENT_CONFIGURE_NOTIFY
: {
332 xcb_present_configure_notify_event_t
*ce
= (void *) ge
;
334 draw
->width
= ce
->width
;
335 draw
->height
= ce
->height
;
336 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
339 case XCB_PRESENT_COMPLETE_NOTIFY
: {
340 xcb_present_complete_notify_event_t
*ce
= (void *) ge
;
342 /* Compute the processed SBC number from the received 32-bit serial number
343 * merged with the upper 32-bits of the sent 64-bit serial number while
346 if (ce
->kind
== XCB_PRESENT_COMPLETE_KIND_PIXMAP
) {
347 draw
->recv_sbc
= (draw
->send_sbc
& 0xffffffff00000000LL
) | ce
->serial
;
348 if (draw
->recv_sbc
> draw
->send_sbc
)
349 draw
->recv_sbc
-= 0x100000000;
351 case XCB_PRESENT_COMPLETE_MODE_FLIP
:
352 draw
->flipping
= true;
354 case XCB_PRESENT_COMPLETE_MODE_COPY
:
355 draw
->flipping
= false;
358 dri3_update_num_back(draw
);
360 if (draw
->vtable
->show_fps
)
361 draw
->vtable
->show_fps(draw
, ce
->ust
);
366 draw
->recv_msc_serial
= ce
->serial
;
367 draw
->notify_ust
= ce
->ust
;
368 draw
->notify_msc
= ce
->msc
;
372 case XCB_PRESENT_EVENT_IDLE_NOTIFY
: {
373 xcb_present_idle_notify_event_t
*ie
= (void *) ge
;
376 for (b
= 0; b
< sizeof(draw
->buffers
) / sizeof(draw
->buffers
[0]); b
++) {
377 struct loader_dri3_buffer
*buf
= draw
->buffers
[b
];
379 if (buf
&& buf
->pixmap
== ie
->pixmap
) {
381 if (draw
->num_back
<= b
&& b
< LOADER_DRI3_MAX_BACK
) {
382 dri3_free_render_buffer(draw
, buf
);
383 draw
->buffers
[b
] = NULL
;
395 dri3_wait_for_event(struct loader_dri3_drawable
*draw
)
397 xcb_generic_event_t
*ev
;
398 xcb_present_generic_event_t
*ge
;
400 xcb_flush(draw
->conn
);
401 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
405 dri3_handle_present_event(draw
, ge
);
409 /** loader_dri3_wait_for_msc
411 * Get the X server to send an event when the target msc/divisor/remainder is
415 loader_dri3_wait_for_msc(struct loader_dri3_drawable
*draw
,
417 int64_t divisor
, int64_t remainder
,
418 int64_t *ust
, int64_t *msc
, int64_t *sbc
)
422 msc_serial
= ++draw
->send_msc_serial
;
423 xcb_present_notify_msc(draw
->conn
,
430 xcb_flush(draw
->conn
);
432 /* Wait for the event */
433 if (draw
->special_event
) {
434 while ((int32_t) (msc_serial
- draw
->recv_msc_serial
) > 0) {
435 if (!dri3_wait_for_event(draw
))
440 *ust
= draw
->notify_ust
;
441 *msc
= draw
->notify_msc
;
442 *sbc
= draw
->recv_sbc
;
447 /** loader_dri3_wait_for_sbc
449 * Wait for the completed swap buffer count to reach the specified
450 * target. Presumably the application knows that this will be reached with
451 * outstanding complete events, or we're going to be here awhile.
454 loader_dri3_wait_for_sbc(struct loader_dri3_drawable
*draw
,
455 int64_t target_sbc
, int64_t *ust
,
456 int64_t *msc
, int64_t *sbc
)
458 /* From the GLX_OML_sync_control spec:
460 * "If <target_sbc> = 0, the function will block until all previous
461 * swaps requested with glXSwapBuffersMscOML for that window have
465 target_sbc
= draw
->send_sbc
;
467 while (draw
->recv_sbc
< target_sbc
) {
468 if (!dri3_wait_for_event(draw
))
474 *sbc
= draw
->recv_sbc
;
478 /** loader_dri3_find_back
480 * Find an idle back buffer. If there isn't one, then
481 * wait for a present idle notify event from the X server
484 dri3_find_back(struct loader_dri3_drawable
*draw
)
487 xcb_generic_event_t
*ev
;
488 xcb_present_generic_event_t
*ge
;
489 int num_to_consider
= draw
->num_back
;
491 /* Increase the likelyhood of reusing current buffer */
492 dri3_flush_present_events(draw
);
494 /* Check whether we need to reuse the current back buffer as new back.
495 * In that case, wait until it's not busy anymore.
497 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1) {
499 draw
->cur_blit_source
= -1;
503 for (b
= 0; b
< num_to_consider
; b
++) {
504 int id
= LOADER_DRI3_BACK_ID((b
+ draw
->cur_back
) % draw
->num_back
);
505 struct loader_dri3_buffer
*buffer
= draw
->buffers
[id
];
507 if (!buffer
|| !buffer
->busy
) {
512 xcb_flush(draw
->conn
);
513 ev
= xcb_wait_for_special_event(draw
->conn
, draw
->special_event
);
517 dri3_handle_present_event(draw
, ge
);
521 static xcb_gcontext_t
522 dri3_drawable_gc(struct loader_dri3_drawable
*draw
)
526 xcb_create_gc(draw
->conn
,
527 (draw
->gc
= xcb_generate_id(draw
->conn
)),
529 XCB_GC_GRAPHICS_EXPOSURES
,
536 static struct loader_dri3_buffer
*
537 dri3_back_buffer(struct loader_dri3_drawable
*draw
)
539 return draw
->buffers
[LOADER_DRI3_BACK_ID(draw
->cur_back
)];
542 static struct loader_dri3_buffer
*
543 dri3_fake_front_buffer(struct loader_dri3_drawable
*draw
)
545 return draw
->buffers
[LOADER_DRI3_FRONT_ID
];
549 dri3_copy_area(xcb_connection_t
*c
,
550 xcb_drawable_t src_drawable
,
551 xcb_drawable_t dst_drawable
,
560 xcb_void_cookie_t cookie
;
562 cookie
= xcb_copy_area_checked(c
,
572 xcb_discard_reply(c
, cookie
.sequence
);
576 * Asks the driver to flush any queued work necessary for serializing with the
577 * X command stream, and optionally the slightly more strict requirement of
578 * glFlush() equivalence (which would require flushing even if nothing had
579 * been drawn to a window system framebuffer, for example).
582 loader_dri3_flush(struct loader_dri3_drawable
*draw
,
584 enum __DRI2throttleReason throttle_reason
)
586 /* NEED TO CHECK WHETHER CONTEXT IS NULL */
587 __DRIcontext
*dri_context
= draw
->vtable
->get_dri_context(draw
);
590 draw
->ext
->flush
->flush_with_flags(dri_context
, draw
->dri_drawable
,
591 flags
, throttle_reason
);
596 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable
*draw
,
598 int width
, int height
,
601 struct loader_dri3_buffer
*back
;
602 unsigned flags
= __DRI2_FLUSH_DRAWABLE
;
604 /* Check we have the right attachments */
605 if (!draw
->have_back
|| draw
->is_pixmap
)
609 flags
|= __DRI2_FLUSH_CONTEXT
;
610 loader_dri3_flush(draw
, flags
, __DRI2_THROTTLE_SWAPBUFFER
);
612 back
= dri3_back_buffer(draw
);
613 y
= draw
->height
- y
- height
;
615 if (draw
->is_different_gpu
) {
616 /* Update the linear buffer part of the back buffer
617 * for the dri3_copy_area operation
619 (void) loader_dri3_blit_image(draw
,
622 0, 0, back
->width
, back
->height
,
623 0, 0, __BLIT_FLAG_FLUSH
);
624 /* We use blit_image to update our fake front,
626 if (draw
->have_fake_front
)
627 (void) loader_dri3_blit_image(draw
,
628 dri3_fake_front_buffer(draw
)->image
,
631 x
, y
, __BLIT_FLAG_FLUSH
);
634 loader_dri3_swapbuffer_barrier(draw
);
635 dri3_fence_reset(draw
->conn
, back
);
636 dri3_copy_area(draw
->conn
,
637 dri3_back_buffer(draw
)->pixmap
,
639 dri3_drawable_gc(draw
),
640 x
, y
, x
, y
, width
, height
);
641 dri3_fence_trigger(draw
->conn
, back
);
642 /* Refresh the fake front (if present) after we just damaged the real
645 if (draw
->have_fake_front
&& !draw
->is_different_gpu
) {
646 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
647 dri3_copy_area(draw
->conn
,
648 dri3_back_buffer(draw
)->pixmap
,
649 dri3_fake_front_buffer(draw
)->pixmap
,
650 dri3_drawable_gc(draw
),
651 x
, y
, x
, y
, width
, height
);
652 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
653 dri3_fence_await(draw
->conn
, dri3_fake_front_buffer(draw
));
655 dri3_fence_await(draw
->conn
, back
);
659 loader_dri3_copy_drawable(struct loader_dri3_drawable
*draw
,
663 loader_dri3_flush(draw
, __DRI2_FLUSH_DRAWABLE
, 0);
665 dri3_fence_reset(draw
->conn
, dri3_fake_front_buffer(draw
));
666 dri3_copy_area(draw
->conn
,
668 dri3_drawable_gc(draw
),
669 0, 0, 0, 0, draw
->width
, draw
->height
);
670 dri3_fence_trigger(draw
->conn
, dri3_fake_front_buffer(draw
));
671 dri3_fence_await(draw
->conn
, dri3_fake_front_buffer(draw
));
675 loader_dri3_wait_x(struct loader_dri3_drawable
*draw
)
677 struct loader_dri3_buffer
*front
;
679 if (draw
== NULL
|| !draw
->have_fake_front
)
682 front
= dri3_fake_front_buffer(draw
);
684 loader_dri3_copy_drawable(draw
, front
->pixmap
, draw
->drawable
);
686 /* In the psc->is_different_gpu case, the linear buffer has been updated,
687 * but not yet the tiled buffer.
688 * Copy back to the tiled buffer we use for rendering.
689 * Note that we don't need flushing.
691 if (draw
->is_different_gpu
)
692 (void) loader_dri3_blit_image(draw
,
694 front
->linear_buffer
,
695 0, 0, front
->width
, front
->height
,
700 loader_dri3_wait_gl(struct loader_dri3_drawable
*draw
)
702 struct loader_dri3_buffer
*front
;
704 if (draw
== NULL
|| !draw
->have_fake_front
)
707 front
= dri3_fake_front_buffer(draw
);
709 /* In the psc->is_different_gpu case, we update the linear_buffer
710 * before updating the real front.
712 if (draw
->is_different_gpu
)
713 (void) loader_dri3_blit_image(draw
,
714 front
->linear_buffer
,
716 0, 0, front
->width
, front
->height
,
717 0, 0, __BLIT_FLAG_FLUSH
);
718 loader_dri3_swapbuffer_barrier(draw
);
719 loader_dri3_copy_drawable(draw
, draw
->drawable
, front
->pixmap
);
722 /** dri3_flush_present_events
724 * Process any present events that have been received from the X server
727 dri3_flush_present_events(struct loader_dri3_drawable
*draw
)
729 /* Check to see if any configuration changes have occurred
730 * since we were last invoked
732 if (draw
->special_event
) {
733 xcb_generic_event_t
*ev
;
735 while ((ev
= xcb_poll_for_special_event(draw
->conn
,
736 draw
->special_event
)) != NULL
) {
737 xcb_present_generic_event_t
*ge
= (void *) ev
;
738 dri3_handle_present_event(draw
, ge
);
743 /** loader_dri3_swap_buffers_msc
745 * Make the current back buffer visible using the present extension
748 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable
*draw
,
749 int64_t target_msc
, int64_t divisor
,
750 int64_t remainder
, unsigned flush_flags
,
753 struct loader_dri3_buffer
*back
;
755 uint32_t options
= XCB_PRESENT_OPTION_NONE
;
757 draw
->vtable
->flush_drawable(draw
, flush_flags
);
759 back
= draw
->buffers
[dri3_find_back(draw
)];
760 if (draw
->is_different_gpu
&& back
) {
761 /* Update the linear buffer before presenting the pixmap */
762 (void) loader_dri3_blit_image(draw
,
765 0, 0, back
->width
, back
->height
,
766 0, 0, __BLIT_FLAG_FLUSH
);
767 /* Update the fake front */
768 if (draw
->have_fake_front
)
769 (void) loader_dri3_blit_image(draw
,
770 draw
->buffers
[LOADER_DRI3_FRONT_ID
]->image
,
772 0, 0, draw
->width
, draw
->height
,
773 0, 0, __BLIT_FLAG_FLUSH
);
776 /* If we need to preload the new back buffer, remember the source.
777 * The force_copy parameter is used by EGL to attempt to preserve
778 * the back buffer across a call to this function.
781 draw
->cur_blit_source
= LOADER_DRI3_BACK_ID(draw
->cur_back
);
783 dri3_flush_present_events(draw
);
785 if (back
&& !draw
->is_pixmap
) {
786 dri3_fence_reset(draw
->conn
, back
);
788 /* Compute when we want the frame shown by taking the last known
789 * successful MSC and adding in a swap interval for each outstanding swap
790 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
794 if (target_msc
== 0 && divisor
== 0 && remainder
== 0)
795 target_msc
= draw
->msc
+ draw
->swap_interval
*
796 (draw
->send_sbc
- draw
->recv_sbc
);
797 else if (divisor
== 0 && remainder
> 0) {
798 /* From the GLX_OML_sync_control spec:
799 * "If <divisor> = 0, the swap will occur when MSC becomes
800 * greater than or equal to <target_msc>."
802 * Note that there's no mention of the remainder. The Present
803 * extension throws BadValue for remainder != 0 with divisor == 0, so
804 * just drop the passed in value.
809 /* From the GLX_EXT_swap_control spec
810 * and the EGL 1.4 spec (page 53):
812 * "If <interval> is set to a value of 0, buffer swaps are not
813 * synchronized to a video frame."
815 * Implementation note: It is possible to enable triple buffering
816 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
819 if (draw
->swap_interval
== 0)
820 options
|= XCB_PRESENT_OPTION_ASYNC
;
822 /* If we need to populate the new back, but need to reuse the back
823 * buffer slot due to lack of local blit capabilities, make sure
824 * the server doesn't flip and we deadlock.
826 if (!loader_dri3_have_image_blit(draw
) && draw
->cur_blit_source
!= -1)
827 options
|= XCB_PRESENT_OPTION_COPY
;
830 back
->last_swap
= draw
->send_sbc
;
831 xcb_present_pixmap(draw
->conn
,
834 (uint32_t) draw
->send_sbc
,
839 None
, /* target_crtc */
846 ret
= (int64_t) draw
->send_sbc
;
848 /* If there's a fake front, then copy the source back buffer
849 * to the fake front to keep it up to date. This needs
850 * to reset the fence and make future users block until
851 * the X server is done copying the bits
853 if (draw
->have_fake_front
&& !draw
->is_different_gpu
) {
854 dri3_fence_reset(draw
->conn
, draw
->buffers
[LOADER_DRI3_FRONT_ID
]);
855 dri3_copy_area(draw
->conn
,
857 draw
->buffers
[LOADER_DRI3_FRONT_ID
]->pixmap
,
858 dri3_drawable_gc(draw
),
860 draw
->width
, draw
->height
);
861 dri3_fence_trigger(draw
->conn
, draw
->buffers
[LOADER_DRI3_FRONT_ID
]);
863 xcb_flush(draw
->conn
);
868 draw
->ext
->flush
->invalidate(draw
->dri_drawable
);
874 loader_dri3_query_buffer_age(struct loader_dri3_drawable
*draw
)
876 int back_id
= LOADER_DRI3_BACK_ID(dri3_find_back(draw
));
878 if (back_id
< 0 || !draw
->buffers
[back_id
])
881 if (draw
->buffers
[back_id
]->last_swap
!= 0)
882 return draw
->send_sbc
- draw
->buffers
[back_id
]->last_swap
+ 1;
889 * Wrapper around xcb_dri3_open
892 loader_dri3_open(xcb_connection_t
*conn
,
896 xcb_dri3_open_cookie_t cookie
;
897 xcb_dri3_open_reply_t
*reply
;
900 cookie
= xcb_dri3_open(conn
,
904 reply
= xcb_dri3_open_reply(conn
, cookie
, NULL
);
908 if (reply
->nfd
!= 1) {
913 fd
= xcb_dri3_open_reply_fds(conn
, reply
)[0];
915 fcntl(fd
, F_SETFD
, fcntl(fd
, F_GETFD
) | FD_CLOEXEC
);
921 dri3_cpp_for_format(uint32_t format
) {
923 case __DRI_IMAGE_FORMAT_R8
:
925 case __DRI_IMAGE_FORMAT_RGB565
:
926 case __DRI_IMAGE_FORMAT_GR88
:
928 case __DRI_IMAGE_FORMAT_XRGB8888
:
929 case __DRI_IMAGE_FORMAT_ARGB8888
:
930 case __DRI_IMAGE_FORMAT_ABGR8888
:
931 case __DRI_IMAGE_FORMAT_XBGR8888
:
932 case __DRI_IMAGE_FORMAT_XRGB2101010
:
933 case __DRI_IMAGE_FORMAT_ARGB2101010
:
934 case __DRI_IMAGE_FORMAT_SARGB8
:
936 case __DRI_IMAGE_FORMAT_NONE
:
942 /** loader_dri3_alloc_render_buffer
944 * Use the driver createImage function to construct a __DRIimage, then
945 * get a file descriptor for that and create an X pixmap from that
947 * Allocate an xshmfence for synchronization
949 static struct loader_dri3_buffer
*
950 dri3_alloc_render_buffer(struct loader_dri3_drawable
*draw
, unsigned int format
,
951 int width
, int height
, int depth
)
953 struct loader_dri3_buffer
*buffer
;
954 __DRIimage
*pixmap_buffer
;
956 xcb_sync_fence_t sync_fence
;
957 struct xshmfence
*shm_fence
;
958 int buffer_fd
, fence_fd
;
961 /* Create an xshmfence object and
962 * prepare to send that to the X server
965 fence_fd
= xshmfence_alloc_shm();
969 shm_fence
= xshmfence_map_shm(fence_fd
);
970 if (shm_fence
== NULL
)
973 /* Allocate the image from the driver
975 buffer
= calloc(1, sizeof *buffer
);
979 buffer
->cpp
= dri3_cpp_for_format(format
);
983 if (!draw
->is_different_gpu
) {
984 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
987 __DRI_IMAGE_USE_SHARE
|
988 __DRI_IMAGE_USE_SCANOUT
|
989 __DRI_IMAGE_USE_BACKBUFFER
,
991 pixmap_buffer
= buffer
->image
;
996 buffer
->image
= draw
->ext
->image
->createImage(draw
->dri_screen
,
1005 buffer
->linear_buffer
=
1006 draw
->ext
->image
->createImage(draw
->dri_screen
,
1007 width
, height
, format
,
1008 __DRI_IMAGE_USE_SHARE
|
1009 __DRI_IMAGE_USE_LINEAR
|
1010 __DRI_IMAGE_USE_BACKBUFFER
,
1012 pixmap_buffer
= buffer
->linear_buffer
;
1014 if (!buffer
->linear_buffer
)
1015 goto no_linear_buffer
;
1018 /* X wants the stride, so ask the image for it
1020 if (!draw
->ext
->image
->queryImage(pixmap_buffer
, __DRI_IMAGE_ATTRIB_STRIDE
,
1022 goto no_buffer_attrib
;
1024 buffer
->pitch
= stride
;
1026 if (!draw
->ext
->image
->queryImage(pixmap_buffer
, __DRI_IMAGE_ATTRIB_FD
,
1028 goto no_buffer_attrib
;
1030 xcb_dri3_pixmap_from_buffer(draw
->conn
,
1031 (pixmap
= xcb_generate_id(draw
->conn
)),
1034 width
, height
, buffer
->pitch
,
1035 depth
, buffer
->cpp
* 8,
1038 xcb_dri3_fence_from_fd(draw
->conn
,
1040 (sync_fence
= xcb_generate_id(draw
->conn
)),
1044 buffer
->pixmap
= pixmap
;
1045 buffer
->own_pixmap
= true;
1046 buffer
->sync_fence
= sync_fence
;
1047 buffer
->shm_fence
= shm_fence
;
1048 buffer
->width
= width
;
1049 buffer
->height
= height
;
1051 /* Mark the buffer as idle
1053 dri3_fence_set(buffer
);
1058 draw
->ext
->image
->destroyImage(pixmap_buffer
);
1060 if (draw
->is_different_gpu
)
1061 draw
->ext
->image
->destroyImage(buffer
->image
);
1065 xshmfence_unmap_shm(shm_fence
);
1071 /** loader_dri3_update_drawable
1073 * Called the first time we use the drawable and then
1074 * after we receive present configure notify events to
1075 * track the geometry of the drawable
1078 dri3_update_drawable(__DRIdrawable
*driDrawable
,
1079 struct loader_dri3_drawable
*draw
)
1081 if (draw
->first_init
) {
1082 xcb_get_geometry_cookie_t geom_cookie
;
1083 xcb_get_geometry_reply_t
*geom_reply
;
1084 xcb_void_cookie_t cookie
;
1085 xcb_generic_error_t
*error
;
1086 xcb_present_query_capabilities_cookie_t present_capabilities_cookie
;
1087 xcb_present_query_capabilities_reply_t
*present_capabilities_reply
;
1089 draw
->first_init
= false;
1091 /* Try to select for input on the window.
1093 * If the drawable is a window, this will get our events
1096 * Otherwise, we'll get a BadWindow error back from this request which
1097 * will let us know that the drawable is a pixmap instead.
1100 draw
->eid
= xcb_generate_id(draw
->conn
);
1102 xcb_present_select_input_checked(draw
->conn
, draw
->eid
, draw
->drawable
,
1103 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY
|
1104 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY
|
1105 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY
);
1107 present_capabilities_cookie
=
1108 xcb_present_query_capabilities(draw
->conn
, draw
->drawable
);
1110 /* Create an XCB event queue to hold present events outside of the usual
1111 * application event queue
1113 draw
->special_event
= xcb_register_for_special_xge(draw
->conn
,
1117 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1119 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1124 draw
->width
= geom_reply
->width
;
1125 draw
->height
= geom_reply
->height
;
1126 draw
->depth
= geom_reply
->depth
;
1127 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
1131 draw
->is_pixmap
= false;
1133 /* Check to see if our select input call failed. If it failed with a
1134 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1135 * special event queue created above and mark the drawable as a pixmap
1138 error
= xcb_request_check(draw
->conn
, cookie
);
1140 present_capabilities_reply
=
1141 xcb_present_query_capabilities_reply(draw
->conn
,
1142 present_capabilities_cookie
,
1145 if (present_capabilities_reply
) {
1146 draw
->present_capabilities
= present_capabilities_reply
->capabilities
;
1147 free(present_capabilities_reply
);
1149 draw
->present_capabilities
= 0;
1152 if (error
->error_code
!= BadWindow
) {
1156 draw
->is_pixmap
= true;
1157 xcb_unregister_for_special_event(draw
->conn
, draw
->special_event
);
1158 draw
->special_event
= NULL
;
1161 dri3_flush_present_events(draw
);
1165 /* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1166 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1167 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
1168 * translate to __DRI_IMAGE_FOURCC codes in the call to createImageFromFds
1171 image_format_to_fourcc(int format
)
1174 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1176 case __DRI_IMAGE_FORMAT_SARGB8
: return __DRI_IMAGE_FOURCC_SARGB8888
;
1177 case __DRI_IMAGE_FORMAT_RGB565
: return __DRI_IMAGE_FOURCC_RGB565
;
1178 case __DRI_IMAGE_FORMAT_XRGB8888
: return __DRI_IMAGE_FOURCC_XRGB8888
;
1179 case __DRI_IMAGE_FORMAT_ARGB8888
: return __DRI_IMAGE_FOURCC_ARGB8888
;
1180 case __DRI_IMAGE_FORMAT_ABGR8888
: return __DRI_IMAGE_FOURCC_ABGR8888
;
1181 case __DRI_IMAGE_FORMAT_XBGR8888
: return __DRI_IMAGE_FOURCC_XBGR8888
;
1187 loader_dri3_create_image(xcb_connection_t
*c
,
1188 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
,
1189 unsigned int format
,
1190 __DRIscreen
*dri_screen
,
1191 const __DRIimageExtension
*image
,
1192 void *loaderPrivate
)
1195 __DRIimage
*image_planar
, *ret
;
1198 /* Get an FD for the pixmap object
1200 fds
= xcb_dri3_buffer_from_pixmap_reply_fds(c
, bp_reply
);
1202 stride
= bp_reply
->stride
;
1205 /* createImageFromFds creates a wrapper __DRIimage structure which
1206 * can deal with multiple planes for things like Yuv images. So, once
1207 * we've gotten the planar wrapper, pull the single plane out of it and
1208 * discard the wrapper.
1210 image_planar
= image
->createImageFromFds(dri_screen
,
1213 image_format_to_fourcc(format
),
1215 &stride
, &offset
, loaderPrivate
);
1220 ret
= image
->fromPlanar(image_planar
, 0, loaderPrivate
);
1222 image
->destroyImage(image_planar
);
1227 /** dri3_get_pixmap_buffer
1229 * Get the DRM object for a pixmap from the X server and
1230 * wrap that with a __DRIimage structure using createImageFromFds
1232 static struct loader_dri3_buffer
*
1233 dri3_get_pixmap_buffer(__DRIdrawable
*driDrawable
, unsigned int format
,
1234 enum loader_dri3_buffer_type buffer_type
,
1235 struct loader_dri3_drawable
*draw
)
1237 int buf_id
= loader_dri3_pixmap_buf_id(buffer_type
);
1238 struct loader_dri3_buffer
*buffer
= draw
->buffers
[buf_id
];
1239 xcb_drawable_t pixmap
;
1240 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie
;
1241 xcb_dri3_buffer_from_pixmap_reply_t
*bp_reply
;
1242 xcb_sync_fence_t sync_fence
;
1243 struct xshmfence
*shm_fence
;
1249 pixmap
= draw
->drawable
;
1251 buffer
= calloc(1, sizeof *buffer
);
1255 fence_fd
= xshmfence_alloc_shm();
1258 shm_fence
= xshmfence_map_shm(fence_fd
);
1259 if (shm_fence
== NULL
) {
1264 xcb_dri3_fence_from_fd(draw
->conn
,
1266 (sync_fence
= xcb_generate_id(draw
->conn
)),
1270 bp_cookie
= xcb_dri3_buffer_from_pixmap(draw
->conn
, pixmap
);
1271 bp_reply
= xcb_dri3_buffer_from_pixmap_reply(draw
->conn
, bp_cookie
, NULL
);
1275 buffer
->image
= loader_dri3_create_image(draw
->conn
, bp_reply
, format
,
1276 draw
->dri_screen
, draw
->ext
->image
,
1281 buffer
->pixmap
= pixmap
;
1282 buffer
->own_pixmap
= false;
1283 buffer
->width
= bp_reply
->width
;
1284 buffer
->height
= bp_reply
->height
;
1285 buffer
->buffer_type
= buffer_type
;
1286 buffer
->shm_fence
= shm_fence
;
1287 buffer
->sync_fence
= sync_fence
;
1289 draw
->buffers
[buf_id
] = buffer
;
1297 xcb_sync_destroy_fence(draw
->conn
, sync_fence
);
1298 xshmfence_unmap_shm(shm_fence
);
1307 * Find a front or back buffer, allocating new ones as necessary
1309 static struct loader_dri3_buffer
*
1310 dri3_get_buffer(__DRIdrawable
*driDrawable
,
1311 unsigned int format
,
1312 enum loader_dri3_buffer_type buffer_type
,
1313 struct loader_dri3_drawable
*draw
)
1315 struct loader_dri3_buffer
*buffer
;
1318 if (buffer_type
== loader_dri3_buffer_back
) {
1319 buf_id
= dri3_find_back(draw
);
1324 buf_id
= LOADER_DRI3_FRONT_ID
;
1327 buffer
= draw
->buffers
[buf_id
];
1329 /* Allocate a new buffer if there isn't an old one, or if that
1330 * old one is the wrong size
1332 if (!buffer
|| buffer
->width
!= draw
->width
||
1333 buffer
->height
!= draw
->height
) {
1334 struct loader_dri3_buffer
*new_buffer
;
1336 /* Allocate the new buffers
1338 new_buffer
= dri3_alloc_render_buffer(draw
,
1346 /* When resizing, copy the contents of the old buffer, waiting for that
1347 * copy to complete using our fences before proceeding
1349 switch (buffer_type
) {
1350 case loader_dri3_buffer_back
:
1352 if (!buffer
->linear_buffer
) {
1353 dri3_fence_reset(draw
->conn
, new_buffer
);
1354 dri3_fence_await(draw
->conn
, buffer
);
1355 dri3_copy_area(draw
->conn
,
1358 dri3_drawable_gc(draw
),
1360 draw
->width
, draw
->height
);
1361 dri3_fence_trigger(draw
->conn
, new_buffer
);
1362 } else if (draw
->vtable
->in_current_context(draw
)) {
1363 (void) loader_dri3_blit_image(draw
,
1366 0, 0, draw
->width
, draw
->height
,
1369 dri3_free_render_buffer(draw
, buffer
);
1372 case loader_dri3_buffer_front
:
1373 loader_dri3_swapbuffer_barrier(draw
);
1374 dri3_fence_reset(draw
->conn
, new_buffer
);
1375 dri3_copy_area(draw
->conn
,
1378 dri3_drawable_gc(draw
),
1380 draw
->width
, draw
->height
);
1381 dri3_fence_trigger(draw
->conn
, new_buffer
);
1383 if (new_buffer
->linear_buffer
&&
1384 draw
->vtable
->in_current_context(draw
)) {
1385 dri3_fence_await(draw
->conn
, new_buffer
);
1386 (void) loader_dri3_blit_image(draw
,
1388 new_buffer
->linear_buffer
,
1389 0, 0, draw
->width
, draw
->height
,
1394 buffer
= new_buffer
;
1395 buffer
->buffer_type
= buffer_type
;
1396 draw
->buffers
[buf_id
] = buffer
;
1398 dri3_fence_await(draw
->conn
, buffer
);
1401 * Do we need to preserve the content of a previous buffer?
1403 * Note that this blit is needed only to avoid a wait for a buffer that
1404 * is currently in the flip chain or being scanned out from. That's really
1405 * a tradeoff. If we're ok with the wait we can reduce the number of back
1406 * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
1407 * but in the latter case we must disallow page-flipping.
1409 if (buffer_type
== loader_dri3_buffer_back
&&
1410 draw
->cur_blit_source
!= -1 &&
1411 draw
->buffers
[draw
->cur_blit_source
] &&
1412 buffer
!= draw
->buffers
[draw
->cur_blit_source
]) {
1414 struct loader_dri3_buffer
*source
= draw
->buffers
[draw
->cur_blit_source
];
1416 /* Avoid flushing here. Will propably do good for tiling hardware. */
1417 (void) loader_dri3_blit_image(draw
,
1420 0, 0, draw
->width
, draw
->height
,
1422 buffer
->last_swap
= source
->last_swap
;
1423 draw
->cur_blit_source
= -1;
1425 /* Return the requested buffer */
1429 /** dri3_free_buffers
1431 * Free the front bufffer or all of the back buffers. Used
1432 * when the application changes which buffers it needs
1435 dri3_free_buffers(__DRIdrawable
*driDrawable
,
1436 enum loader_dri3_buffer_type buffer_type
,
1437 struct loader_dri3_drawable
*draw
)
1439 struct loader_dri3_buffer
*buffer
;
1444 switch (buffer_type
) {
1445 case loader_dri3_buffer_back
:
1446 first_id
= LOADER_DRI3_BACK_ID(0);
1447 n_id
= LOADER_DRI3_MAX_BACK
;
1449 case loader_dri3_buffer_front
:
1450 first_id
= LOADER_DRI3_FRONT_ID
;
1454 for (buf_id
= first_id
; buf_id
< first_id
+ n_id
; buf_id
++) {
1455 buffer
= draw
->buffers
[buf_id
];
1457 dri3_free_render_buffer(draw
, buffer
);
1458 draw
->buffers
[buf_id
] = NULL
;
1463 /** loader_dri3_get_buffers
1465 * The published buffer allocation API.
1466 * Returns all of the necessary buffers, allocating
1470 loader_dri3_get_buffers(__DRIdrawable
*driDrawable
,
1471 unsigned int format
,
1473 void *loaderPrivate
,
1474 uint32_t buffer_mask
,
1475 struct __DRIimageList
*buffers
)
1477 struct loader_dri3_drawable
*draw
= loaderPrivate
;
1478 struct loader_dri3_buffer
*front
, *back
;
1480 buffers
->image_mask
= 0;
1481 buffers
->front
= NULL
;
1482 buffers
->back
= NULL
;
1487 if (!dri3_update_drawable(driDrawable
, draw
))
1490 /* pixmaps always have front buffers */
1491 if (draw
->is_pixmap
)
1492 buffer_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
1494 if (buffer_mask
& __DRI_IMAGE_BUFFER_FRONT
) {
1495 /* All pixmaps are owned by the server gpu.
1496 * When we use a different gpu, we can't use the pixmap
1497 * as buffer since it is potentially tiled a way
1498 * our device can't understand. In this case, use
1499 * a fake front buffer. Hopefully the pixmap
1500 * content will get synced with the fake front
1503 if (draw
->is_pixmap
&& !draw
->is_different_gpu
)
1504 front
= dri3_get_pixmap_buffer(driDrawable
,
1506 loader_dri3_buffer_front
,
1509 front
= dri3_get_buffer(driDrawable
,
1511 loader_dri3_buffer_front
,
1517 dri3_free_buffers(driDrawable
, loader_dri3_buffer_front
, draw
);
1518 draw
->have_fake_front
= 0;
1521 if (buffer_mask
& __DRI_IMAGE_BUFFER_BACK
) {
1522 back
= dri3_get_buffer(driDrawable
,
1524 loader_dri3_buffer_back
,
1528 draw
->have_back
= 1;
1530 dri3_free_buffers(driDrawable
, loader_dri3_buffer_back
, draw
);
1531 draw
->have_back
= 0;
1535 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_FRONT
;
1536 buffers
->front
= front
->image
;
1537 draw
->have_fake_front
= draw
->is_different_gpu
|| !draw
->is_pixmap
;
1541 buffers
->image_mask
|= __DRI_IMAGE_BUFFER_BACK
;
1542 buffers
->back
= back
->image
;
1545 draw
->stamp
= stamp
;
1550 /** loader_dri3_update_drawable_geometry
1552 * Get the current drawable geometry.
1555 loader_dri3_update_drawable_geometry(struct loader_dri3_drawable
*draw
)
1557 xcb_get_geometry_cookie_t geom_cookie
;
1558 xcb_get_geometry_reply_t
*geom_reply
;
1560 geom_cookie
= xcb_get_geometry(draw
->conn
, draw
->drawable
);
1562 geom_reply
= xcb_get_geometry_reply(draw
->conn
, geom_cookie
, NULL
);
1565 draw
->width
= geom_reply
->width
;
1566 draw
->height
= geom_reply
->height
;
1567 draw
->vtable
->set_drawable_size(draw
, draw
->width
, draw
->height
);
/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t ust, msc, sbc;

   /* Waiting for sbc 0 blocks until all outstanding swaps complete. */
   (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
}
1589 * Perform any cleanup associated with a close screen operation.
1590 * \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
1592 * This function destroys the screen's cached swap context if any.
1595 loader_dri3_close_screen(__DRIscreen
*dri_screen
)
1597 mtx_lock(&blit_context
.mtx
);
1598 if (blit_context
.ctx
&& blit_context
.cur_screen
== dri_screen
) {
1599 blit_context
.core
->destroyContext(blit_context
.ctx
);
1600 blit_context
.ctx
= NULL
;
1602 mtx_unlock(&blit_context
.mtx
);