src/loader/loader_dri3_helper.c
1 /*
2 * Copyright © 2013 Keith Packard
3 * Copyright © 2015 Boyan Ding
4 *
5 * Permission to use, copy, modify, distribute, and sell this software and its
6 * documentation for any purpose is hereby granted without fee, provided that
7 * the above copyright notice appear in all copies and that both that copyright
8 * notice and this permission notice appear in supporting documentation, and
9 * that the name of the copyright holders not be used in advertising or
10 * publicity pertaining to distribution of the software without specific,
11 * written prior permission. The copyright holders make no representations
12 * about the suitability of this software for any purpose. It is provided "as
13 * is" without express or implied warranty.
14 *
15 * THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
16 * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
17 * EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
18 * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
19 * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
20 * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
21 * OF THIS SOFTWARE.
22 */
23
24 #include <fcntl.h>
25 #include <stdlib.h>
26 #include <unistd.h>
27
28 #include <X11/xshmfence.h>
29 #include <xcb/xcb.h>
30 #include <xcb/dri3.h>
31 #include <xcb/present.h>
32
33 #include <X11/Xlib-xcb.h>
34
35 #include "loader_dri3_helper.h"
36
37 /* From xmlpool/options.h, user-exposed so these values should be stable */
38 #define DRI_CONF_VBLANK_NEVER 0
39 #define DRI_CONF_VBLANK_DEF_INTERVAL_0 1
40 #define DRI_CONF_VBLANK_DEF_INTERVAL_1 2
41 #define DRI_CONF_VBLANK_ALWAYS_SYNC 3
42
43 static inline void
44 dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
45 {
46 xshmfence_reset(buffer->shm_fence);
47 }
48
49 static inline void
50 dri3_fence_set(struct loader_dri3_buffer *buffer)
51 {
52 xshmfence_trigger(buffer->shm_fence);
53 }
54
55 static inline void
56 dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
57 {
58 xcb_sync_trigger_fence(c, buffer->sync_fence);
59 }
60
61 static inline void
62 dri3_fence_await(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
63 {
64 xcb_flush(c);
65 xshmfence_await(buffer->shm_fence);
66 }
67
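/* The four helpers above implement the xshmfence/XSync fence protocol used
 * throughout this file: reset the fence before asking the server to touch a
 * pixmap, have the server trigger it once the request has completed, and
 * await it before the client touches the buffer again.  A minimal sketch of
 * that pattern around a server-side copy (the GC and geometry are assumed to
 * come from the caller; loader_dri3_copy_sub_buffer() below follows the same
 * sequence):
 */
static inline void
dri3_fence_copy_sketch(struct loader_dri3_drawable *draw,
                       struct loader_dri3_buffer *buf,
                       xcb_gcontext_t gc,
                       uint16_t width, uint16_t height)
{
   dri3_fence_reset(draw->conn, buf);            /* mark the fence unsignaled */
   xcb_copy_area(draw->conn, buf->pixmap, draw->drawable, gc,
                 0, 0, 0, 0, width, height);     /* queue the server-side copy */
   dri3_fence_trigger(draw->conn, buf);          /* server signals when done */
   dri3_fence_await(draw->conn, buf);            /* flush and block on the fence */
}
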
68 static void
69 dri3_update_num_back(struct loader_dri3_drawable *draw)
70 {
71 if (draw->flipping)
72 draw->num_back = 3;
73 else if (draw->vtable->get_swap_interval(draw) != 0)
74 draw->num_back = 2;
75 else
76 draw->num_back = 1;
77 }
78
79 void
80 loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
81 {
82 interval = draw->vtable->clamp_swap_interval(draw, interval);
83 draw->vtable->set_swap_interval(draw, interval);
84 dri3_update_num_back(draw);
85 }
86
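/* The clamp_swap_interval vtable hook lets the owning GLX/EGL code restrict
 * the interval according to the driconf vblank_mode it queried.  A plausible
 * sketch of such a clamp (the vblank_mode argument is an assumption; this
 * helper does not store it itself):
 */
static inline int
dri3_clamp_swap_interval_sketch(int vblank_mode, int interval)
{
   if (vblank_mode == DRI_CONF_VBLANK_NEVER)
      return 0;                      /* never sync: force immediate swaps */
   if (vblank_mode == DRI_CONF_VBLANK_ALWAYS_SYNC && interval < 1)
      return 1;                      /* always sync: wait at least one vblank */
   return interval;
}
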
87 /** dri3_free_render_buffer
88 *
89 * Free everything associated with one render buffer: the pixmap, the sync
90 * and shm fences, and the driver image.
91 */
92 static void
93 dri3_free_render_buffer(struct loader_dri3_drawable *draw,
94 struct loader_dri3_buffer *buffer)
95 {
96 if (buffer->own_pixmap)
97 xcb_free_pixmap(draw->conn, buffer->pixmap);
98 xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
99 xshmfence_unmap_shm(buffer->shm_fence);
100 (draw->ext->image->destroyImage)(buffer->image);
101 if (buffer->linear_buffer)
102 (draw->ext->image->destroyImage)(buffer->linear_buffer);
103 free(buffer);
104 }
105
106 void
107 loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
108 {
109 int i;
110
111 (draw->ext->core->destroyDrawable)(draw->dri_drawable);
112
113 for (i = 0; i < LOADER_DRI3_NUM_BUFFERS; i++) {
114 if (draw->buffers[i])
115 dri3_free_render_buffer(draw, draw->buffers[i]);
116 }
117
118 if (draw->special_event) {
119 xcb_void_cookie_t cookie =
120 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
121 XCB_PRESENT_EVENT_MASK_NO_EVENT);
122
123 xcb_discard_reply(draw->conn, cookie.sequence);
124 xcb_unregister_for_special_event(draw->conn, draw->special_event);
125 }
126 }
127
128 int
129 loader_dri3_drawable_init(xcb_connection_t *conn,
130 xcb_drawable_t drawable,
131 __DRIscreen *dri_screen,
132 bool is_different_gpu,
133 const __DRIconfig *dri_config,
134 struct loader_dri3_extensions *ext,
135 struct loader_dri3_vtable *vtable,
136 struct loader_dri3_drawable *draw)
137 {
138 xcb_get_geometry_cookie_t cookie;
139 xcb_get_geometry_reply_t *reply;
140 xcb_generic_error_t *error;
141 GLint vblank_mode = DRI_CONF_VBLANK_DEF_INTERVAL_1;
142 int swap_interval;
143
144 draw->conn = conn;
145 draw->ext = ext;
146 draw->vtable = vtable;
147 draw->drawable = drawable;
148 draw->dri_screen = dri_screen;
149 draw->is_different_gpu = is_different_gpu;
150
151 draw->have_back = 0;
152 draw->have_fake_front = 0;
153 draw->first_init = true;
154
155 if (draw->ext->config)
156 draw->ext->config->configQueryi(draw->dri_screen,
157 "vblank_mode", &vblank_mode);
158
159 switch (vblank_mode) {
160 case DRI_CONF_VBLANK_NEVER:
161 case DRI_CONF_VBLANK_DEF_INTERVAL_0:
162 swap_interval = 0;
163 break;
164 case DRI_CONF_VBLANK_DEF_INTERVAL_1:
165 case DRI_CONF_VBLANK_ALWAYS_SYNC:
166 default:
167 swap_interval = 1;
168 break;
169 }
170 draw->vtable->set_swap_interval(draw, swap_interval);
171
172 dri3_update_num_back(draw);
173
174 /* Create a new drawable */
175 draw->dri_drawable =
176 (draw->ext->image_driver->createNewDrawable)(dri_screen,
177 dri_config,
178 draw);
179
180 if (!draw->dri_drawable)
181 return 1;
182
183 cookie = xcb_get_geometry(draw->conn, draw->drawable);
184 reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
185 if (reply == NULL || error != NULL) {
186 draw->ext->core->destroyDrawable(draw->dri_drawable);
187 return 1;
188 }
189
190 draw->width = reply->width;
191 draw->height = reply->height;
192 draw->depth = reply->depth;
193 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
194 free(reply);
195
196 /*
197     * Make sure the server has the same swap interval we do for the new
198 * drawable.
199 */
200 loader_dri3_set_swap_interval(draw, swap_interval);
201
202 return 0;
203 }
204
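/* loader_dri3_drawable_init() relies on the embedding API (GLX or EGL) to
 * supply the callbacks used throughout this file.  A rough sketch of the
 * wiring, with the caller-side callback implementations (the egl_* names)
 * assumed to exist elsewhere:
 *
 *    static struct loader_dri3_vtable egl_dri3_vtable = {
 *       .get_swap_interval   = egl_get_swap_interval,
 *       .clamp_swap_interval = egl_clamp_swap_interval,
 *       .set_swap_interval   = egl_set_swap_interval,
 *       .set_drawable_size   = egl_set_drawable_size,
 *       .in_current_context  = egl_in_current_context,
 *       .get_dri_context     = egl_get_dri_context,
 *       .flush_drawable      = egl_flush_drawable,
 *       .show_fps            = NULL,
 *    };
 *
 *    if (loader_dri3_drawable_init(conn, window, dri_screen,
 *                                  is_different_gpu, dri_config,
 *                                  &extensions, &egl_dri3_vtable, draw) != 0)
 *       goto fail;
 *
 * A non-zero return means drawable creation failed and draw must not be used.
 */
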
205 /*
206 * Process one Present event
207 */
208 static void
209 dri3_handle_present_event(struct loader_dri3_drawable *draw,
210 xcb_present_generic_event_t *ge)
211 {
212 switch (ge->evtype) {
213 case XCB_PRESENT_CONFIGURE_NOTIFY: {
214 xcb_present_configure_notify_event_t *ce = (void *) ge;
215
216 draw->width = ce->width;
217 draw->height = ce->height;
218 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
219 break;
220 }
221 case XCB_PRESENT_COMPLETE_NOTIFY: {
222 xcb_present_complete_notify_event_t *ce = (void *) ge;
223
224 /* Compute the processed SBC number from the received 32-bit serial number
225 * merged with the upper 32-bits of the sent 64-bit serial number while
226 * checking for wrap.
227 */
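      /* Worked example with illustrative numbers: if send_sbc is
       * 0x100000002 and the event carries serial 0xffffffff, the merge
       * below yields 0x1ffffffff, which exceeds send_sbc, so 0x100000000
       * is subtracted and recv_sbc becomes 0xffffffff, i.e. the SBC of a
       * swap queued just before the 32-bit serial wrapped.
       */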
228 if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
229 draw->recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
230 if (draw->recv_sbc > draw->send_sbc)
231 draw->recv_sbc -= 0x100000000;
232 switch (ce->mode) {
233 case XCB_PRESENT_COMPLETE_MODE_FLIP:
234 draw->flipping = true;
235 break;
236 case XCB_PRESENT_COMPLETE_MODE_COPY:
237 draw->flipping = false;
238 break;
239 }
240 dri3_update_num_back(draw);
241
242 if (draw->vtable->show_fps)
243 draw->vtable->show_fps(draw, ce->ust);
244
245 draw->ust = ce->ust;
246 draw->msc = ce->msc;
247 } else {
248 draw->recv_msc_serial = ce->serial;
249 draw->notify_ust = ce->ust;
250 draw->notify_msc = ce->msc;
251 }
252 break;
253 }
254 case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
255 xcb_present_idle_notify_event_t *ie = (void *) ge;
256 int b;
257
258 for (b = 0; b < sizeof(draw->buffers) / sizeof(draw->buffers[0]); b++) {
259 struct loader_dri3_buffer *buf = draw->buffers[b];
260
261 if (buf && buf->pixmap == ie->pixmap) {
262 buf->busy = 0;
263 if (draw->num_back <= b && b < LOADER_DRI3_MAX_BACK) {
264 dri3_free_render_buffer(draw, buf);
265 draw->buffers[b] = NULL;
266 }
267 break;
268 }
269 }
270 break;
271 }
272 }
273 free(ge);
274 }
275
276 static bool
277 dri3_wait_for_event(struct loader_dri3_drawable *draw)
278 {
279 xcb_generic_event_t *ev;
280 xcb_present_generic_event_t *ge;
281
282 xcb_flush(draw->conn);
283 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
284 if (!ev)
285 return false;
286 ge = (void *) ev;
287 dri3_handle_present_event(draw, ge);
288 return true;
289 }
290
291 /** loader_dri3_wait_for_msc
292 *
293 * Get the X server to send an event when the target msc/divisor/remainder is
294 * reached.
295 */
296 bool
297 loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
298 int64_t target_msc,
299 int64_t divisor, int64_t remainder,
300 int64_t *ust, int64_t *msc, int64_t *sbc)
301 {
302 uint32_t msc_serial;
303
304 msc_serial = ++draw->send_msc_serial;
305 xcb_present_notify_msc(draw->conn,
306 draw->drawable,
307 msc_serial,
308 target_msc,
309 divisor,
310 remainder);
311
312 xcb_flush(draw->conn);
313
314 /* Wait for the event */
315 if (draw->special_event) {
316 while ((int32_t) (msc_serial - draw->recv_msc_serial) > 0) {
317 if (!dri3_wait_for_event(draw))
318 return false;
319 }
320 }
321
322 *ust = draw->notify_ust;
323 *msc = draw->notify_msc;
324 *sbc = draw->recv_sbc;
325
326 return true;
327 }
328
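/* A typical caller is the GLX/EGL implementation of OML_sync_control;
 * glXWaitForMscOML() can be forwarded more or less directly (sketch, with
 * the caller-side priv->loader_drawable lookup assumed):
 *
 *    return loader_dri3_wait_for_msc(&priv->loader_drawable,
 *                                    target_msc, divisor, remainder,
 *                                    ust, msc, sbc);
 */
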
329 /** loader_dri3_wait_for_sbc
330 *
331 * Wait for the completed swap buffer count to reach the specified
332 * target. Presumably the application knows that this will be reached with
333 * outstanding complete events, or we're going to be here a while.
334 */
335 int
336 loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
337 int64_t target_sbc, int64_t *ust,
338 int64_t *msc, int64_t *sbc)
339 {
340 /* From the GLX_OML_sync_control spec:
341 *
342 * "If <target_sbc> = 0, the function will block until all previous
343 * swaps requested with glXSwapBuffersMscOML for that window have
344 * completed."
345 */
346 if (!target_sbc)
347 target_sbc = draw->send_sbc;
348
349 while (draw->recv_sbc < target_sbc) {
350 if (!dri3_wait_for_event(draw))
351 return 0;
352 }
353
354 *ust = draw->ust;
355 *msc = draw->msc;
356 *sbc = draw->recv_sbc;
357 return 1;
358 }
359
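/* glXWaitForSbcOML() maps onto this the same way; passing target_sbc == 0
 * waits for every swap issued so far (sketch, caller-side names assumed):
 *
 *    if (!loader_dri3_wait_for_sbc(&priv->loader_drawable, 0,
 *                                  &ust, &msc, &sbc))
 *       return False;
 */
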
360 /** dri3_find_back
361 *
362 * Find an idle back buffer. If there isn't one, wait for a Present idle
363 * notify event from the X server.
364 */
365 static int
366 dri3_find_back(struct loader_dri3_drawable *draw)
367 {
368 int b;
369 xcb_generic_event_t *ev;
370 xcb_present_generic_event_t *ge;
371
372 for (;;) {
373 for (b = 0; b < draw->num_back; b++) {
374 int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->num_back);
375 struct loader_dri3_buffer *buffer = draw->buffers[id];
376
377 if (!buffer || !buffer->busy) {
378 draw->cur_back = id;
379 return id;
380 }
381 }
382 xcb_flush(draw->conn);
383 ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
384 if (!ev)
385 return -1;
386 ge = (void *) ev;
387 dri3_handle_present_event(draw, ge);
388 }
389 }
390
391 static xcb_gcontext_t
392 dri3_drawable_gc(struct loader_dri3_drawable *draw)
393 {
394 if (!draw->gc) {
395 uint32_t v = 0;
396 xcb_create_gc(draw->conn,
397 (draw->gc = xcb_generate_id(draw->conn)),
398 draw->drawable,
399 XCB_GC_GRAPHICS_EXPOSURES,
400 &v);
401 }
402 return draw->gc;
403 }
404
405
406 static struct loader_dri3_buffer *
407 dri3_back_buffer(struct loader_dri3_drawable *draw)
408 {
409 return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
410 }
411
412 static struct loader_dri3_buffer *
413 dri3_fake_front_buffer(struct loader_dri3_drawable *draw)
414 {
415 return draw->buffers[LOADER_DRI3_FRONT_ID];
416 }
417
418 static void
419 dri3_copy_area(xcb_connection_t *c,
420 xcb_drawable_t src_drawable,
421 xcb_drawable_t dst_drawable,
422 xcb_gcontext_t gc,
423 int16_t src_x,
424 int16_t src_y,
425 int16_t dst_x,
426 int16_t dst_y,
427 uint16_t width,
428 uint16_t height)
429 {
430 xcb_void_cookie_t cookie;
431
432 cookie = xcb_copy_area_checked(c,
433 src_drawable,
434 dst_drawable,
435 gc,
436 src_x,
437 src_y,
438 dst_x,
439 dst_y,
440 width,
441 height);
442 xcb_discard_reply(c, cookie.sequence);
443 }
444
445 /**
446 * Asks the driver to flush any queued work necessary for serializing with the
447 * X command stream, and optionally to satisfy the slightly stricter
448 * requirement of glFlush() equivalence (which requires flushing even if
449 * nothing has been drawn to a window-system framebuffer, for example).
450 */
451 void
452 loader_dri3_flush(struct loader_dri3_drawable *draw,
453 unsigned flags,
454 enum __DRI2throttleReason throttle_reason)
455 {
456    /* The drawable may not currently be bound to a context; only flush if one exists. */
457 __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);
458
459 if (dri_context) {
460 draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
461 flags, throttle_reason);
462 }
463 }
464
465 void
466 loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
467 int x, int y,
468 int width, int height,
469 bool flush)
470 {
471 struct loader_dri3_buffer *back;
472 unsigned flags = __DRI2_FLUSH_DRAWABLE;
473 __DRIcontext *dri_context;
474
475 dri_context = draw->vtable->get_dri_context(draw);
476
477 /* Check we have the right attachments */
478 if (!draw->have_back || draw->is_pixmap)
479 return;
480
481 if (flush)
482 flags |= __DRI2_FLUSH_CONTEXT;
483 loader_dri3_flush(draw, flags, __DRI2_THROTTLE_SWAPBUFFER);
484
485 back = dri3_back_buffer(draw);
486 y = draw->height - y - height;
487
488 if (draw->is_different_gpu && draw->vtable->in_current_context(draw)) {
489 /* Update the linear buffer part of the back buffer
490 * for the dri3_copy_area operation
491 */
492 draw->ext->image->blitImage(dri_context,
493 back->linear_buffer,
494 back->image,
495 0, 0, back->width,
496 back->height,
497 0, 0, back->width,
498 back->height, __BLIT_FLAG_FLUSH);
499       /* We also use blitImage to update our fake front.
500        */
501 if (draw->have_fake_front)
502 draw->ext->image->blitImage(dri_context,
503 dri3_fake_front_buffer(draw)->image,
504 back->image,
505 x, y, width, height,
506 x, y, width, height, __BLIT_FLAG_FLUSH);
507 }
508
509 dri3_fence_reset(draw->conn, back);
510 dri3_copy_area(draw->conn,
511 dri3_back_buffer(draw)->pixmap,
512 draw->drawable,
513 dri3_drawable_gc(draw),
514 x, y, x, y, width, height);
515 dri3_fence_trigger(draw->conn, back);
516 /* Refresh the fake front (if present) after we just damaged the real
517 * front.
518 */
519 if (draw->have_fake_front && !draw->is_different_gpu) {
520 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
521 dri3_copy_area(draw->conn,
522 dri3_back_buffer(draw)->pixmap,
523 dri3_fake_front_buffer(draw)->pixmap,
524 dri3_drawable_gc(draw),
525 x, y, x, y, width, height);
526 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
527 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
528 }
529 dri3_fence_await(draw->conn, back);
530 }
531
532 void
533 loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
534 xcb_drawable_t dest,
535 xcb_drawable_t src)
536 {
537 loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, 0);
538
539 dri3_fence_reset(draw->conn, dri3_fake_front_buffer(draw));
540 dri3_copy_area(draw->conn,
541 src, dest,
542 dri3_drawable_gc(draw),
543 0, 0, 0, 0, draw->width, draw->height);
544 dri3_fence_trigger(draw->conn, dri3_fake_front_buffer(draw));
545 dri3_fence_await(draw->conn, dri3_fake_front_buffer(draw));
546 }
547
548 void
549 loader_dri3_wait_x(struct loader_dri3_drawable *draw)
550 {
551 struct loader_dri3_buffer *front;
552 __DRIcontext *dri_context;
553
554 if (draw == NULL || !draw->have_fake_front)
555 return;
556
557 front = dri3_fake_front_buffer(draw);
558 dri_context = draw->vtable->get_dri_context(draw);
559
560 loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);
561
562    /* In the draw->is_different_gpu case, the linear buffer has been updated,
563 * but not yet the tiled buffer.
564 * Copy back to the tiled buffer we use for rendering.
565 * Note that we don't need flushing.
566 */
567 if (draw->is_different_gpu && draw->vtable->in_current_context(draw))
568 draw->ext->image->blitImage(dri_context,
569 front->image,
570 front->linear_buffer,
571 0, 0, front->width,
572 front->height,
573 0, 0, front->width,
574 front->height, 0);
575 }
576
577 void
578 loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
579 {
580 struct loader_dri3_buffer *front;
581 __DRIcontext *dri_context;
582
583 if (draw == NULL || !draw->have_fake_front)
584 return;
585
586 front = dri3_fake_front_buffer(draw);
587 dri_context = draw->vtable->get_dri_context(draw);
588
589    /* In the draw->is_different_gpu case, we update the linear_buffer
590 * before updating the real front.
591 */
592 if (draw->is_different_gpu && draw->vtable->in_current_context(draw))
593 draw->ext->image->blitImage(dri_context,
594 front->linear_buffer,
595 front->image,
596 0, 0, front->width,
597 front->height,
598 0, 0, front->width,
599 front->height, __BLIT_FLAG_FLUSH);
600 loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
601 }
602
603 /** dri3_flush_present_events
604 *
605 * Process any present events that have been received from the X server
606 */
607 static void
608 dri3_flush_present_events(struct loader_dri3_drawable *draw)
609 {
610 /* Check to see if any configuration changes have occurred
611 * since we were last invoked
612 */
613 if (draw->special_event) {
614 xcb_generic_event_t *ev;
615
616 while ((ev = xcb_poll_for_special_event(draw->conn,
617 draw->special_event)) != NULL) {
618 xcb_present_generic_event_t *ge = (void *) ev;
619 dri3_handle_present_event(draw, ge);
620 }
621 }
622 }
623
624 /** loader_dri3_swap_buffers_msc
625 *
626 * Make the current back buffer visible using the present extension
627 */
628 int64_t
629 loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
630 int64_t target_msc, int64_t divisor,
631 int64_t remainder, unsigned flush_flags,
632 bool force_copy)
633 {
634 struct loader_dri3_buffer *back;
635 __DRIcontext *dri_context;
636 int64_t ret = 0;
637 uint32_t options = XCB_PRESENT_OPTION_NONE;
638 int swap_interval;
639
640 dri_context = draw->vtable->get_dri_context(draw);
641 swap_interval = draw->vtable->get_swap_interval(draw);
642
643 draw->vtable->flush_drawable(draw, flush_flags);
644
645 back = draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
646 if (draw->is_different_gpu && back) {
647 /* Update the linear buffer before presenting the pixmap */
648 draw->ext->image->blitImage(dri_context,
649 back->linear_buffer,
650 back->image,
651 0, 0, back->width,
652 back->height,
653 0, 0, back->width,
654 back->height, __BLIT_FLAG_FLUSH);
655 /* Update the fake front */
656 if (draw->have_fake_front)
657 draw->ext->image->blitImage(dri_context,
658 draw->buffers[LOADER_DRI3_FRONT_ID]->image,
659 back->image,
660 0, 0, draw->width, draw->height,
661 0, 0, draw->width, draw->height,
662 __BLIT_FLAG_FLUSH);
663 }
664
665 dri3_flush_present_events(draw);
666
667 if (back && !draw->is_pixmap) {
668 dri3_fence_reset(draw->conn, back);
669
670 /* Compute when we want the frame shown by taking the last known
671 * successful MSC and adding in a swap interval for each outstanding swap
672 * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
673        * semantics".
674 */
675 ++draw->send_sbc;
676 if (target_msc == 0 && divisor == 0 && remainder == 0)
677 target_msc = draw->msc + swap_interval *
678 (draw->send_sbc - draw->recv_sbc);
679 else if (divisor == 0 && remainder > 0) {
680 /* From the GLX_OML_sync_control spec:
681 * "If <divisor> = 0, the swap will occur when MSC becomes
682 * greater than or equal to <target_msc>."
683 *
684 * Note that there's no mention of the remainder. The Present
685 * extension throws BadValue for remainder != 0 with divisor == 0, so
686 * just drop the passed in value.
687 */
688 remainder = 0;
689 }
690
691 /* From the GLX_EXT_swap_control spec
692 * and the EGL 1.4 spec (page 53):
693 *
694 * "If <interval> is set to a value of 0, buffer swaps are not
695 * synchronized to a video frame."
696 *
697 * Implementation note: It is possible to enable triple buffering
698 * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
699 * the default.
700 */
701 if (swap_interval == 0)
702 options |= XCB_PRESENT_OPTION_ASYNC;
703 if (force_copy)
704 options |= XCB_PRESENT_OPTION_COPY;
705
706 back->busy = 1;
707 back->last_swap = draw->send_sbc;
708 xcb_present_pixmap(draw->conn,
709 draw->drawable,
710 back->pixmap,
711 (uint32_t) draw->send_sbc,
712 0, /* valid */
713 0, /* update */
714 0, /* x_off */
715 0, /* y_off */
716 None, /* target_crtc */
717 None,
718 back->sync_fence,
719 options,
720 target_msc,
721 divisor,
722 remainder, 0, NULL);
723 ret = (int64_t) draw->send_sbc;
724
725 /* If there's a fake front, then copy the source back buffer
726 * to the fake front to keep it up to date. This needs
727 * to reset the fence and make future users block until
728 * the X server is done copying the bits
729 */
730 if (draw->have_fake_front && !draw->is_different_gpu) {
731 dri3_fence_reset(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
732 dri3_copy_area(draw->conn,
733 back->pixmap,
734 draw->buffers[LOADER_DRI3_FRONT_ID]->pixmap,
735 dri3_drawable_gc(draw),
736 0, 0, 0, 0,
737 draw->width, draw->height);
738 dri3_fence_trigger(draw->conn, draw->buffers[LOADER_DRI3_FRONT_ID]);
739 }
740 xcb_flush(draw->conn);
741 if (draw->stamp)
742 ++(*draw->stamp);
743 }
744
745 (draw->ext->flush->invalidate)(draw->dri_drawable);
746
747 return ret;
748 }
749
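/* For a plain SwapBuffers with no OML target the caller passes zero for
 * target_msc, divisor and remainder, letting the code above derive the
 * target MSC from the last completed MSC plus the swap interval (sketch,
 * caller-side draw pointer assumed):
 *
 *    int64_t sbc = loader_dri3_swap_buffers_msc(draw, 0, 0, 0,
 *                                               __DRI2_FLUSH_DRAWABLE |
 *                                               __DRI2_FLUSH_CONTEXT,
 *                                               false);
 *
 * The returned value is the swap buffer count assigned to this swap and can
 * later be handed to loader_dri3_wait_for_sbc().
 */
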
750 int
751 loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
752 {
753 int back_id = LOADER_DRI3_BACK_ID(dri3_find_back(draw));
754
755 if (back_id < 0 || !draw->buffers[back_id])
756 return 0;
757
758 if (draw->buffers[back_id]->last_swap != 0)
759 return draw->send_sbc - draw->buffers[back_id]->last_swap + 1;
760 else
761 return 0;
762 }
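
/* Worked example for the age computation above, with illustrative numbers:
 * if the chosen back buffer was last presented as SBC 10 and send_sbc has
 * since advanced to 13, its contents are 13 - 10 + 1 = 4 frames old, which
 * is the value EXT_buffer_age expects.  A buffer that has never been
 * presented reports age 0 (contents undefined).
 */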
763
764 /** loader_dri3_open
765 *
766 * Wrapper around xcb_dri3_open
767 */
768 int
769 loader_dri3_open(xcb_connection_t *conn,
770 xcb_window_t root,
771 uint32_t provider)
772 {
773 xcb_dri3_open_cookie_t cookie;
774 xcb_dri3_open_reply_t *reply;
775 int fd;
776
777 cookie = xcb_dri3_open(conn,
778 root,
779 provider);
780
781 reply = xcb_dri3_open_reply(conn, cookie, NULL);
782 if (!reply)
783 return -1;
784
785 if (reply->nfd != 1) {
786 free(reply);
787 return -1;
788 }
789
790 fd = xcb_dri3_open_reply_fds(conn, reply)[0];
791 free(reply);
792 fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);
793
794 return fd;
795 }
796
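/* Typical use at screen-initialization time: ask the X server for a DRM
 * device fd for the default provider and hand it to the driver (sketch;
 * error handling and the screen pointer are the caller's):
 *
 *    int fd = loader_dri3_open(conn, screen->root, None);
 *    if (fd < 0)
 *       return NULL;   (DRI3 unavailable; fall back to DRI2 or swrast)
 *
 * The returned fd already has FD_CLOEXEC set.
 */
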
797 static uint32_t
798 dri3_cpp_for_format(uint32_t format) {
799 switch (format) {
800 case __DRI_IMAGE_FORMAT_R8:
801 return 1;
802 case __DRI_IMAGE_FORMAT_RGB565:
803 case __DRI_IMAGE_FORMAT_GR88:
804 return 2;
805 case __DRI_IMAGE_FORMAT_XRGB8888:
806 case __DRI_IMAGE_FORMAT_ARGB8888:
807 case __DRI_IMAGE_FORMAT_ABGR8888:
808 case __DRI_IMAGE_FORMAT_XBGR8888:
809 case __DRI_IMAGE_FORMAT_XRGB2101010:
810 case __DRI_IMAGE_FORMAT_ARGB2101010:
811 case __DRI_IMAGE_FORMAT_SARGB8:
812 return 4;
813 case __DRI_IMAGE_FORMAT_NONE:
814 default:
815 return 0;
816 }
817 }
818
819 /** dri3_alloc_render_buffer
820 *
821 * Use the driver createImage function to construct a __DRIimage, get a
822 * file descriptor for it, and create an X pixmap from that fd.
823 *
824 * Also allocate an xshmfence for synchronization.
825 */
826 static struct loader_dri3_buffer *
827 dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
828 int width, int height, int depth)
829 {
830 struct loader_dri3_buffer *buffer;
831 __DRIimage *pixmap_buffer;
832 xcb_pixmap_t pixmap;
833 xcb_sync_fence_t sync_fence;
834 struct xshmfence *shm_fence;
835 int buffer_fd, fence_fd;
836 int stride;
837
838 /* Create an xshmfence object and
839 * prepare to send that to the X server
840 */
841
842 fence_fd = xshmfence_alloc_shm();
843 if (fence_fd < 0)
844 return NULL;
845
846 shm_fence = xshmfence_map_shm(fence_fd);
847 if (shm_fence == NULL)
848 goto no_shm_fence;
849
850 /* Allocate the image from the driver
851 */
852 buffer = calloc(1, sizeof *buffer);
853 if (!buffer)
854 goto no_buffer;
855
856 buffer->cpp = dri3_cpp_for_format(format);
857 if (!buffer->cpp)
858 goto no_image;
859
860 if (!draw->is_different_gpu) {
861 buffer->image = (draw->ext->image->createImage)(draw->dri_screen,
862 width, height,
863 format,
864 __DRI_IMAGE_USE_SHARE |
865 __DRI_IMAGE_USE_SCANOUT |
866 __DRI_IMAGE_USE_BACKBUFFER,
867 buffer);
868 pixmap_buffer = buffer->image;
869
870 if (!buffer->image)
871 goto no_image;
872 } else {
873 buffer->image = (draw->ext->image->createImage)(draw->dri_screen,
874 width, height,
875 format,
876 0,
877 buffer);
878
879 if (!buffer->image)
880 goto no_image;
881
882 buffer->linear_buffer =
883 (draw->ext->image->createImage)(draw->dri_screen,
884 width, height, format,
885 __DRI_IMAGE_USE_SHARE |
886 __DRI_IMAGE_USE_LINEAR |
887 __DRI_IMAGE_USE_BACKBUFFER,
888 buffer);
889 pixmap_buffer = buffer->linear_buffer;
890
891 if (!buffer->linear_buffer)
892 goto no_linear_buffer;
893 }
894
895 /* X wants the stride, so ask the image for it
896 */
897 if (!(draw->ext->image->queryImage)(pixmap_buffer, __DRI_IMAGE_ATTRIB_STRIDE,
898 &stride))
899 goto no_buffer_attrib;
900
901 buffer->pitch = stride;
902
903 if (!(draw->ext->image->queryImage)(pixmap_buffer, __DRI_IMAGE_ATTRIB_FD,
904 &buffer_fd))
905 goto no_buffer_attrib;
906
907 xcb_dri3_pixmap_from_buffer(draw->conn,
908 (pixmap = xcb_generate_id(draw->conn)),
909 draw->drawable,
910 buffer->size,
911 width, height, buffer->pitch,
912 depth, buffer->cpp * 8,
913 buffer_fd);
914
915 xcb_dri3_fence_from_fd(draw->conn,
916 pixmap,
917 (sync_fence = xcb_generate_id(draw->conn)),
918 false,
919 fence_fd);
920
921 buffer->pixmap = pixmap;
922 buffer->own_pixmap = true;
923 buffer->sync_fence = sync_fence;
924 buffer->shm_fence = shm_fence;
925 buffer->width = width;
926 buffer->height = height;
927
928 /* Mark the buffer as idle
929 */
930 dri3_fence_set(buffer);
931
932 return buffer;
933
934 no_buffer_attrib:
935 (draw->ext->image->destroyImage)(pixmap_buffer);
936 no_linear_buffer:
937 if (draw->is_different_gpu)
938 (draw->ext->image->destroyImage)(buffer->image);
939 no_image:
940 free(buffer);
941 no_buffer:
942 xshmfence_unmap_shm(shm_fence);
943 no_shm_fence:
944 close(fence_fd);
945 return NULL;
946 }
947
948 /** dri3_update_drawable
949 *
950 * Called the first time we use the drawable and then
951 * after we receive Present configure notify events to
952 * track the geometry of the drawable.
953 */
954 static int
955 dri3_update_drawable(__DRIdrawable *driDrawable,
956 struct loader_dri3_drawable *draw)
957 {
958 if (draw->first_init) {
959 xcb_get_geometry_cookie_t geom_cookie;
960 xcb_get_geometry_reply_t *geom_reply;
961 xcb_void_cookie_t cookie;
962 xcb_generic_error_t *error;
963 xcb_present_query_capabilities_cookie_t present_capabilities_cookie;
964 xcb_present_query_capabilities_reply_t *present_capabilities_reply;
965
966 draw->first_init = false;
967
968 /* Try to select for input on the window.
969 *
970 * If the drawable is a window, this will get our events
971 * delivered.
972 *
973 * Otherwise, we'll get a BadWindow error back from this request which
974 * will let us know that the drawable is a pixmap instead.
975 */
976
977 draw->eid = xcb_generate_id(draw->conn);
978 cookie =
979 xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
980 XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
981 XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
982 XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
983
984 present_capabilities_cookie =
985 xcb_present_query_capabilities(draw->conn, draw->drawable);
986
987 /* Create an XCB event queue to hold present events outside of the usual
988 * application event queue
989 */
990 draw->special_event = xcb_register_for_special_xge(draw->conn,
991 &xcb_present_id,
992 draw->eid,
993 draw->stamp);
994 geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
995
996 geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
997
998 if (!geom_reply)
999 return false;
1000
1001 draw->width = geom_reply->width;
1002 draw->height = geom_reply->height;
1003 draw->depth = geom_reply->depth;
1004 draw->vtable->set_drawable_size(draw, draw->width, draw->height);
1005
1006 free(geom_reply);
1007
1008 draw->is_pixmap = false;
1009
1010 /* Check to see if our select input call failed. If it failed with a
1011 * BadWindow error, then assume the drawable is a pixmap. Destroy the
1012 * special event queue created above and mark the drawable as a pixmap
1013 */
1014
1015 error = xcb_request_check(draw->conn, cookie);
1016
1017 present_capabilities_reply =
1018 xcb_present_query_capabilities_reply(draw->conn,
1019 present_capabilities_cookie,
1020 NULL);
1021
1022 if (present_capabilities_reply) {
1023 draw->present_capabilities = present_capabilities_reply->capabilities;
1024 free(present_capabilities_reply);
1025 } else
1026 draw->present_capabilities = 0;
1027
1028 if (error) {
1029 if (error->error_code != BadWindow) {
1030 free(error);
1031 return false;
1032 }
1033 draw->is_pixmap = true;
1034 xcb_unregister_for_special_event(draw->conn, draw->special_event);
1035 draw->special_event = NULL;
1036 }
1037 }
1038 dri3_flush_present_events(draw);
1039 return true;
1040 }
1041
1042 /* The DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
1043 * the createImageFromFds call takes __DRI_IMAGE_FOURCC codes. To avoid
1044 * confusion, deal only in __DRI_IMAGE_FORMAT codes here and translate to
1045 * __DRI_IMAGE_FOURCC codes in the call to createImageFromFds.
1046 */
1047 static int
1048 image_format_to_fourcc(int format)
1049 {
1050
1051 /* Convert from __DRI_IMAGE_FORMAT to __DRI_IMAGE_FOURCC (sigh) */
1052 switch (format) {
1053 case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
1054 case __DRI_IMAGE_FORMAT_RGB565: return __DRI_IMAGE_FOURCC_RGB565;
1055 case __DRI_IMAGE_FORMAT_XRGB8888: return __DRI_IMAGE_FOURCC_XRGB8888;
1056 case __DRI_IMAGE_FORMAT_ARGB8888: return __DRI_IMAGE_FOURCC_ARGB8888;
1057 case __DRI_IMAGE_FORMAT_ABGR8888: return __DRI_IMAGE_FOURCC_ABGR8888;
1058 case __DRI_IMAGE_FORMAT_XBGR8888: return __DRI_IMAGE_FOURCC_XBGR8888;
1059 }
1060 return 0;
1061 }
1062
1063 __DRIimage *
1064 loader_dri3_create_image(xcb_connection_t *c,
1065 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
1066 unsigned int format,
1067 __DRIscreen *dri_screen,
1068 const __DRIimageExtension *image,
1069 void *loaderPrivate)
1070 {
1071 int *fds;
1072 __DRIimage *image_planar, *ret;
1073 int stride, offset;
1074
1075 /* Get an FD for the pixmap object
1076 */
1077 fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
1078
1079 stride = bp_reply->stride;
1080 offset = 0;
1081
1082 /* createImageFromFds creates a wrapper __DRIimage structure which
1083 * can deal with multiple planes for things like Yuv images. So, once
1084 * we've gotten the planar wrapper, pull the single plane out of it and
1085 * discard the wrapper.
1086 */
1087 image_planar = (image->createImageFromFds)(dri_screen,
1088 bp_reply->width,
1089 bp_reply->height,
1090 image_format_to_fourcc(format),
1091 fds, 1,
1092 &stride, &offset, loaderPrivate);
1093 close(fds[0]);
1094 if (!image_planar)
1095 return NULL;
1096
1097 ret = (image->fromPlanar)(image_planar, 0, loaderPrivate);
1098
1099 (image->destroyImage)(image_planar);
1100
1101 return ret;
1102 }
1103
1104 /** dri3_get_pixmap_buffer
1105 *
1106 * Get the DRM object for a pixmap from the X server and
1107 * wrap that with a __DRIimage structure using createImageFromFds
1108 */
1109 static struct loader_dri3_buffer *
1110 dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
1111 enum loader_dri3_buffer_type buffer_type,
1112 struct loader_dri3_drawable *draw)
1113 {
1114 int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
1115 struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
1116 xcb_drawable_t pixmap;
1117 xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
1118 xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;
1119 xcb_sync_fence_t sync_fence;
1120 struct xshmfence *shm_fence;
1121 int fence_fd;
1122
1123 if (buffer)
1124 return buffer;
1125
1126 pixmap = draw->drawable;
1127
1128 buffer = calloc(1, sizeof *buffer);
1129 if (!buffer)
1130 goto no_buffer;
1131
1132 fence_fd = xshmfence_alloc_shm();
1133 if (fence_fd < 0)
1134 goto no_fence;
1135 shm_fence = xshmfence_map_shm(fence_fd);
1136 if (shm_fence == NULL) {
1137 close (fence_fd);
1138 goto no_fence;
1139 }
1140
1141 xcb_dri3_fence_from_fd(draw->conn,
1142 pixmap,
1143 (sync_fence = xcb_generate_id(draw->conn)),
1144 false,
1145 fence_fd);
1146
1147 bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
1148 bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
1149 if (!bp_reply)
1150 goto no_image;
1151
1152 buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
1153 draw->dri_screen, draw->ext->image,
1154 buffer);
1155 if (!buffer->image)
1156 goto no_image;
1157
1158 buffer->pixmap = pixmap;
1159 buffer->own_pixmap = false;
1160 buffer->width = bp_reply->width;
1161 buffer->height = bp_reply->height;
1162 buffer->buffer_type = buffer_type;
1163 buffer->shm_fence = shm_fence;
1164 buffer->sync_fence = sync_fence;
1165
1166 draw->buffers[buf_id] = buffer;
1167
1168 free(bp_reply);
1169
1170 return buffer;
1171
1172 no_image:
1173 free(bp_reply);
1174 xcb_sync_destroy_fence(draw->conn, sync_fence);
1175 xshmfence_unmap_shm(shm_fence);
1176 no_fence:
1177 free(buffer);
1178 no_buffer:
1179 return NULL;
1180 }
1181
1182 /** dri3_get_buffer
1183 *
1184 * Find a front or back buffer, allocating new ones as necessary
1185 */
1186 static struct loader_dri3_buffer *
1187 dri3_get_buffer(__DRIdrawable *driDrawable,
1188 unsigned int format,
1189 enum loader_dri3_buffer_type buffer_type,
1190 struct loader_dri3_drawable *draw)
1191 {
1192 struct loader_dri3_buffer *buffer;
1193 int buf_id;
1194 __DRIcontext *dri_context;
1195
1196 dri_context = draw->vtable->get_dri_context(draw);
1197
1198 if (buffer_type == loader_dri3_buffer_back) {
1199 buf_id = dri3_find_back(draw);
1200
1201 if (buf_id < 0)
1202 return NULL;
1203 } else {
1204 buf_id = LOADER_DRI3_FRONT_ID;
1205 }
1206
1207 buffer = draw->buffers[buf_id];
1208
1209 /* Allocate a new buffer if there isn't an old one, or if that
1210 * old one is the wrong size
1211 */
1212 if (!buffer || buffer->width != draw->width ||
1213 buffer->height != draw->height) {
1214 struct loader_dri3_buffer *new_buffer;
1215
1216 /* Allocate the new buffers
1217 */
1218 new_buffer = dri3_alloc_render_buffer(draw,
1219 format,
1220 draw->width,
1221 draw->height,
1222 draw->depth);
1223 if (!new_buffer)
1224 return NULL;
1225
1226 /* When resizing, copy the contents of the old buffer, waiting for that
1227 * copy to complete using our fences before proceeding
1228 */
1229 switch (buffer_type) {
1230 case loader_dri3_buffer_back:
1231 if (buffer) {
1232 if (!buffer->linear_buffer) {
1233 dri3_fence_reset(draw->conn, new_buffer);
1234 dri3_fence_await(draw->conn, buffer);
1235 dri3_copy_area(draw->conn,
1236 buffer->pixmap,
1237 new_buffer->pixmap,
1238 dri3_drawable_gc(draw),
1239 0, 0, 0, 0,
1240 draw->width, draw->height);
1241 dri3_fence_trigger(draw->conn, new_buffer);
1242 } else if (draw->vtable->in_current_context(draw)) {
1243 draw->ext->image->blitImage(dri_context,
1244 new_buffer->image,
1245 buffer->image,
1246 0, 0, draw->width, draw->height,
1247 0, 0, draw->width, draw->height, 0);
1248 }
1249 dri3_free_render_buffer(draw, buffer);
1250 }
1251 break;
1252 case loader_dri3_buffer_front:
1253 dri3_fence_reset(draw->conn, new_buffer);
1254 dri3_copy_area(draw->conn,
1255 draw->drawable,
1256 new_buffer->pixmap,
1257 dri3_drawable_gc(draw),
1258 0, 0, 0, 0,
1259 draw->width, draw->height);
1260 dri3_fence_trigger(draw->conn, new_buffer);
1261
1262 if (new_buffer->linear_buffer &&
1263 draw->vtable->in_current_context(draw)) {
1264 dri3_fence_await(draw->conn, new_buffer);
1265 draw->ext->image->blitImage(dri_context,
1266 new_buffer->image,
1267 new_buffer->linear_buffer,
1268 0, 0, draw->width, draw->height,
1269 0, 0, draw->width, draw->height, 0);
1270 }
1271 break;
1272 }
1273 buffer = new_buffer;
1274 buffer->buffer_type = buffer_type;
1275 draw->buffers[buf_id] = buffer;
1276 }
1277 dri3_fence_await(draw->conn, buffer);
1278
1279 /* Return the requested buffer */
1280 return buffer;
1281 }
1282
1283 /** dri3_free_buffers
1284 *
1285 * Free the front buffer or all of the back buffers. Used
1286 * when the application changes which buffers it needs.
1287 */
1288 static void
1289 dri3_free_buffers(__DRIdrawable *driDrawable,
1290 enum loader_dri3_buffer_type buffer_type,
1291 struct loader_dri3_drawable *draw)
1292 {
1293 struct loader_dri3_buffer *buffer;
1294 int first_id;
1295 int n_id;
1296 int buf_id;
1297
1298 switch (buffer_type) {
1299 case loader_dri3_buffer_back:
1300 first_id = LOADER_DRI3_BACK_ID(0);
1301 n_id = LOADER_DRI3_MAX_BACK;
1302 break;
1303 case loader_dri3_buffer_front:
1304 first_id = LOADER_DRI3_FRONT_ID;
1305 n_id = 1;
1306 }
1307
1308 for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
1309 buffer = draw->buffers[buf_id];
1310 if (buffer) {
1311 dri3_free_render_buffer(draw, buffer);
1312 draw->buffers[buf_id] = NULL;
1313 }
1314 }
1315 }
1316
1317 /** loader_dri3_get_buffers
1318 *
1319 * The published buffer allocation API.
1320 * Returns all of the necessary buffers, allocating
1321 * as needed.
1322 */
1323 int
1324 loader_dri3_get_buffers(__DRIdrawable *driDrawable,
1325 unsigned int format,
1326 uint32_t *stamp,
1327 void *loaderPrivate,
1328 uint32_t buffer_mask,
1329 struct __DRIimageList *buffers)
1330 {
1331 struct loader_dri3_drawable *draw = loaderPrivate;
1332 struct loader_dri3_buffer *front, *back;
1333
1334 buffers->image_mask = 0;
1335 buffers->front = NULL;
1336 buffers->back = NULL;
1337
1338 front = NULL;
1339 back = NULL;
1340
1341 if (!dri3_update_drawable(driDrawable, draw))
1342 return false;
1343
1344 /* pixmaps always have front buffers */
1345 if (draw->is_pixmap)
1346 buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
1347
1348 if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
1349       /* All pixmaps are owned by the server GPU.
1350        * When we use a different GPU, we can't use the pixmap
1351        * as a buffer since it is potentially tiled in a way
1352        * our device can't understand. In this case, use
1353        * a fake front buffer. Hopefully the pixmap
1354        * content will get synced with the fake front
1355        * buffer.
1356        */
1357 if (draw->is_pixmap && !draw->is_different_gpu)
1358 front = dri3_get_pixmap_buffer(driDrawable,
1359 format,
1360 loader_dri3_buffer_front,
1361 draw);
1362 else
1363 front = dri3_get_buffer(driDrawable,
1364 format,
1365 loader_dri3_buffer_front,
1366 draw);
1367
1368 if (!front)
1369 return false;
1370 } else {
1371 dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
1372 draw->have_fake_front = 0;
1373 }
1374
1375 if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
1376 back = dri3_get_buffer(driDrawable,
1377 format,
1378 loader_dri3_buffer_back,
1379 draw);
1380 if (!back)
1381 return false;
1382 draw->have_back = 1;
1383 } else {
1384 dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
1385 draw->have_back = 0;
1386 }
1387
1388 if (front) {
1389 buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
1390 buffers->front = front->image;
1391 draw->have_fake_front = draw->is_different_gpu || !draw->is_pixmap;
1392 }
1393
1394 if (back) {
1395 buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
1396 buffers->back = back->image;
1397 }
1398
1399 draw->stamp = stamp;
1400
1401 return true;
1402 }
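
/* loader_dri3_get_buffers() is designed to slot straight into the DRI image
 * loader extension that drivers query from the loader.  A sketch of the
 * hookup on the GLX/EGL side (the flush-front callback is an assumption; it
 * is provided by the caller, not by this file):
 *
 *    static const __DRIimageLoaderExtension image_loader_extension = {
 *       .base             = { __DRI_IMAGE_LOADER, 1 },
 *       .getBuffers       = loader_dri3_get_buffers,
 *       .flushFrontBuffer = caller_flush_front_buffer,
 *    };
 *
 * The driver then calls back into loader_dri3_get_buffers() whenever it needs
 * up-to-date front/back __DRIimages for a drawable.
 */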